[{"data":1,"prerenderedAt":4394},["ShallowReactive",2],{"/en-us/blog/tags/devsecops-platform/":3,"navigation-en-us":20,"banner-en-us":450,"footer-en-us":467,"DevSecOps platform-tag-page-en-us":676},{"_path":4,"_dir":5,"_draft":6,"_partial":6,"_locale":7,"content":8,"config":11,"_id":13,"_type":14,"title":15,"_source":16,"_file":17,"_stem":18,"_extension":19},"/en-us/blog/tags/devsecops-platform","tags",false,"",{"tag":9,"tagSlug":10},"DevSecOps platform","devsecops-platform",{"template":12},"BlogTag","content:en-us:blog:tags:devsecops-platform.yml","yaml","Devsecops Platform","content","en-us/blog/tags/devsecops-platform.yml","en-us/blog/tags/devsecops-platform","yml",{"_path":21,"_dir":22,"_draft":6,"_partial":6,"_locale":7,"data":23,"_id":446,"_type":14,"title":447,"_source":16,"_file":448,"_stem":449,"_extension":19},"/shared/en-us/main-navigation","en-us",{"logo":24,"freeTrial":29,"sales":34,"login":39,"items":44,"search":377,"minimal":408,"duo":427,"pricingDeployment":436},{"config":25},{"href":26,"dataGaName":27,"dataGaLocation":28},"/","gitlab logo","header",{"text":30,"config":31},"Get free trial",{"href":32,"dataGaName":33,"dataGaLocation":28},"https://gitlab.com/-/trial_registrations/new?glm_source=about.gitlab.com&glm_content=default-saas-trial/","free trial",{"text":35,"config":36},"Talk to sales",{"href":37,"dataGaName":38,"dataGaLocation":28},"/sales/","sales",{"text":40,"config":41},"Sign in",{"href":42,"dataGaName":43,"dataGaLocation":28},"https://gitlab.com/users/sign_in/","sign in",[45,89,187,192,298,358],{"text":46,"config":47,"cards":49,"footer":72},"Platform",{"dataNavLevelOne":48},"platform",[50,56,64],{"title":46,"description":51,"link":52},"The most comprehensive AI-powered DevSecOps Platform",{"text":53,"config":54},"Explore our Platform",{"href":55,"dataGaName":48,"dataGaLocation":28},"/platform/",{"title":57,"description":58,"link":59},"GitLab Duo (AI)","Build software faster with AI at every stage of 
development",{"text":60,"config":61},"Meet GitLab Duo",{"href":62,"dataGaName":63,"dataGaLocation":28},"/gitlab-duo/","gitlab duo ai",{"title":65,"description":66,"link":67},"Why GitLab","10 reasons why Enterprises choose GitLab",{"text":68,"config":69},"Learn more",{"href":70,"dataGaName":71,"dataGaLocation":28},"/why-gitlab/","why gitlab",{"title":73,"items":74},"Get started with",[75,80,85],{"text":76,"config":77},"Platform Engineering",{"href":78,"dataGaName":79,"dataGaLocation":28},"/solutions/platform-engineering/","platform engineering",{"text":81,"config":82},"Developer Experience",{"href":83,"dataGaName":84,"dataGaLocation":28},"/developer-experience/","Developer experience",{"text":86,"config":87},"MLOps",{"href":88,"dataGaName":86,"dataGaLocation":28},"/topics/devops/the-role-of-ai-in-devops/",{"text":90,"left":91,"config":92,"link":94,"lists":98,"footer":169},"Product",true,{"dataNavLevelOne":93},"solutions",{"text":95,"config":96},"View all Solutions",{"href":97,"dataGaName":93,"dataGaLocation":28},"/solutions/",[99,124,148],{"title":100,"description":101,"link":102,"items":107},"Automation","CI/CD and automation to accelerate deployment",{"config":103},{"icon":104,"href":105,"dataGaName":106,"dataGaLocation":28},"AutomatedCodeAlt","/solutions/delivery-automation/","automated software delivery",[108,112,116,120],{"text":109,"config":110},"CI/CD",{"href":111,"dataGaLocation":28,"dataGaName":109},"/solutions/continuous-integration/",{"text":113,"config":114},"AI-Assisted Development",{"href":62,"dataGaLocation":28,"dataGaName":115},"AI assisted development",{"text":117,"config":118},"Source Code Management",{"href":119,"dataGaLocation":28,"dataGaName":117},"/solutions/source-code-management/",{"text":121,"config":122},"Automated Software Delivery",{"href":105,"dataGaLocation":28,"dataGaName":123},"Automated software delivery",{"title":125,"description":126,"link":127,"items":132},"Security","Deliver code faster without compromising 
security",{"config":128},{"href":129,"dataGaName":130,"dataGaLocation":28,"icon":131},"/solutions/security-compliance/","security and compliance","ShieldCheckLight",[133,138,143],{"text":134,"config":135},"Application Security Testing",{"href":136,"dataGaName":137,"dataGaLocation":28},"/solutions/application-security-testing/","Application security testing",{"text":139,"config":140},"Software Supply Chain Security",{"href":141,"dataGaLocation":28,"dataGaName":142},"/solutions/supply-chain/","Software supply chain security",{"text":144,"config":145},"Software Compliance",{"href":146,"dataGaName":147,"dataGaLocation":28},"/solutions/software-compliance/","software compliance",{"title":149,"link":150,"items":155},"Measurement",{"config":151},{"icon":152,"href":153,"dataGaName":154,"dataGaLocation":28},"DigitalTransformation","/solutions/visibility-measurement/","visibility and measurement",[156,160,164],{"text":157,"config":158},"Visibility & Measurement",{"href":153,"dataGaLocation":28,"dataGaName":159},"Visibility and Measurement",{"text":161,"config":162},"Value Stream Management",{"href":163,"dataGaLocation":28,"dataGaName":161},"/solutions/value-stream-management/",{"text":165,"config":166},"Analytics & Insights",{"href":167,"dataGaLocation":28,"dataGaName":168},"/solutions/analytics-and-insights/","Analytics and insights",{"title":170,"items":171},"GitLab for",[172,177,182],{"text":173,"config":174},"Enterprise",{"href":175,"dataGaLocation":28,"dataGaName":176},"/enterprise/","enterprise",{"text":178,"config":179},"Small Business",{"href":180,"dataGaLocation":28,"dataGaName":181},"/small-business/","small business",{"text":183,"config":184},"Public Sector",{"href":185,"dataGaLocation":28,"dataGaName":186},"/solutions/public-sector/","public 
sector",{"text":188,"config":189},"Pricing",{"href":190,"dataGaName":191,"dataGaLocation":28,"dataNavLevelOne":191},"/pricing/","pricing",{"text":193,"config":194,"link":196,"lists":200,"feature":285},"Resources",{"dataNavLevelOne":195},"resources",{"text":197,"config":198},"View all resources",{"href":199,"dataGaName":195,"dataGaLocation":28},"/resources/",[201,234,257],{"title":202,"items":203},"Getting started",[204,209,214,219,224,229],{"text":205,"config":206},"Install",{"href":207,"dataGaName":208,"dataGaLocation":28},"/install/","install",{"text":210,"config":211},"Quick start guides",{"href":212,"dataGaName":213,"dataGaLocation":28},"/get-started/","quick setup checklists",{"text":215,"config":216},"Learn",{"href":217,"dataGaLocation":28,"dataGaName":218},"https://university.gitlab.com/","learn",{"text":220,"config":221},"Product documentation",{"href":222,"dataGaName":223,"dataGaLocation":28},"https://docs.gitlab.com/","product documentation",{"text":225,"config":226},"Best practice videos",{"href":227,"dataGaName":228,"dataGaLocation":28},"/getting-started-videos/","best practice videos",{"text":230,"config":231},"Integrations",{"href":232,"dataGaName":233,"dataGaLocation":28},"/integrations/","integrations",{"title":235,"items":236},"Discover",[237,242,247,252],{"text":238,"config":239},"Customer success stories",{"href":240,"dataGaName":241,"dataGaLocation":28},"/customers/","customer success stories",{"text":243,"config":244},"Blog",{"href":245,"dataGaName":246,"dataGaLocation":28},"/blog/","blog",{"text":248,"config":249},"Remote",{"href":250,"dataGaName":251,"dataGaLocation":28},"https://handbook.gitlab.com/handbook/company/culture/all-remote/","remote",{"text":253,"config":254},"TeamOps",{"href":255,"dataGaName":256,"dataGaLocation":28},"/teamops/","teamops",{"title":258,"items":259},"Connect",[260,265,270,275,280],{"text":261,"config":262},"GitLab 
Services",{"href":263,"dataGaName":264,"dataGaLocation":28},"/services/","services",{"text":266,"config":267},"Community",{"href":268,"dataGaName":269,"dataGaLocation":28},"/community/","community",{"text":271,"config":272},"Forum",{"href":273,"dataGaName":274,"dataGaLocation":28},"https://forum.gitlab.com/","forum",{"text":276,"config":277},"Events",{"href":278,"dataGaName":279,"dataGaLocation":28},"/events/","events",{"text":281,"config":282},"Partners",{"href":283,"dataGaName":284,"dataGaLocation":28},"/partners/","partners",{"backgroundColor":286,"textColor":287,"text":288,"image":289,"link":293},"#2f2a6b","#fff","Insights for the future of software development",{"altText":290,"config":291},"the source promo card",{"src":292},"https://res.cloudinary.com/about-gitlab-com/image/upload/v1758208064/dzl0dbift9xdizyelkk4.svg",{"text":294,"config":295},"Read the latest",{"href":296,"dataGaName":297,"dataGaLocation":28},"/the-source/","the source",{"text":299,"config":300,"lists":302},"Company",{"dataNavLevelOne":301},"company",[303],{"items":304},[305,310,316,318,323,328,333,338,343,348,353],{"text":306,"config":307},"About",{"href":308,"dataGaName":309,"dataGaLocation":28},"/company/","about",{"text":311,"config":312,"footerGa":315},"Jobs",{"href":313,"dataGaName":314,"dataGaLocation":28},"/jobs/","jobs",{"dataGaName":314},{"text":276,"config":317},{"href":278,"dataGaName":279,"dataGaLocation":28},{"text":319,"config":320},"Leadership",{"href":321,"dataGaName":322,"dataGaLocation":28},"/company/team/e-group/","leadership",{"text":324,"config":325},"Team",{"href":326,"dataGaName":327,"dataGaLocation":28},"/company/team/","team",{"text":329,"config":330},"Handbook",{"href":331,"dataGaName":332,"dataGaLocation":28},"https://handbook.gitlab.com/","handbook",{"text":334,"config":335},"Investor relations",{"href":336,"dataGaName":337,"dataGaLocation":28},"https://ir.gitlab.com/","investor relations",{"text":339,"config":340},"Trust 
Center",{"href":341,"dataGaName":342,"dataGaLocation":28},"/security/","trust center",{"text":344,"config":345},"AI Transparency Center",{"href":346,"dataGaName":347,"dataGaLocation":28},"/ai-transparency-center/","ai transparency center",{"text":349,"config":350},"Newsletter",{"href":351,"dataGaName":352,"dataGaLocation":28},"/company/contact/","newsletter",{"text":354,"config":355},"Press",{"href":356,"dataGaName":357,"dataGaLocation":28},"/press/","press",{"text":359,"config":360,"lists":361},"Contact us",{"dataNavLevelOne":301},[362],{"items":363},[364,367,372],{"text":35,"config":365},{"href":37,"dataGaName":366,"dataGaLocation":28},"talk to sales",{"text":368,"config":369},"Get help",{"href":370,"dataGaName":371,"dataGaLocation":28},"/support/","get help",{"text":373,"config":374},"Customer portal",{"href":375,"dataGaName":376,"dataGaLocation":28},"https://customers.gitlab.com/customers/sign_in/","customer portal",{"close":378,"login":379,"suggestions":386},"Close",{"text":380,"link":381},"To search repositories and projects, login to",{"text":382,"config":383},"gitlab.com",{"href":42,"dataGaName":384,"dataGaLocation":385},"search login","search",{"text":387,"default":388},"Suggestions",[389,391,395,397,401,405],{"text":57,"config":390},{"href":62,"dataGaName":57,"dataGaLocation":385},{"text":392,"config":393},"Code Suggestions (AI)",{"href":394,"dataGaName":392,"dataGaLocation":385},"/solutions/code-suggestions/",{"text":109,"config":396},{"href":111,"dataGaName":109,"dataGaLocation":385},{"text":398,"config":399},"GitLab on AWS",{"href":400,"dataGaName":398,"dataGaLocation":385},"/partners/technology-partners/aws/",{"text":402,"config":403},"GitLab on Google Cloud",{"href":404,"dataGaName":402,"dataGaLocation":385},"/partners/technology-partners/google-cloud-platform/",{"text":406,"config":407},"Why 
GitLab?",{"href":70,"dataGaName":406,"dataGaLocation":385},{"freeTrial":409,"mobileIcon":414,"desktopIcon":419,"secondaryButton":422},{"text":410,"config":411},"Start free trial",{"href":412,"dataGaName":33,"dataGaLocation":413},"https://gitlab.com/-/trials/new/","nav",{"altText":415,"config":416},"Gitlab Icon",{"src":417,"dataGaName":418,"dataGaLocation":413},"https://res.cloudinary.com/about-gitlab-com/image/upload/v1758203874/jypbw1jx72aexsoohd7x.svg","gitlab icon",{"altText":415,"config":420},{"src":421,"dataGaName":418,"dataGaLocation":413},"https://res.cloudinary.com/about-gitlab-com/image/upload/v1758203875/gs4c8p8opsgvflgkswz9.svg",{"text":423,"config":424},"Get Started",{"href":425,"dataGaName":426,"dataGaLocation":413},"https://gitlab.com/-/trial_registrations/new?glm_source=about.gitlab.com/compare/gitlab-vs-github/","get started",{"freeTrial":428,"mobileIcon":432,"desktopIcon":434},{"text":429,"config":430},"Learn more about GitLab Duo",{"href":62,"dataGaName":431,"dataGaLocation":413},"gitlab duo",{"altText":415,"config":433},{"src":417,"dataGaName":418,"dataGaLocation":413},{"altText":415,"config":435},{"src":421,"dataGaName":418,"dataGaLocation":413},{"freeTrial":437,"mobileIcon":442,"desktopIcon":444},{"text":438,"config":439},"Back to pricing",{"href":190,"dataGaName":440,"dataGaLocation":413,"icon":441},"back to pricing","GoBack",{"altText":415,"config":443},{"src":417,"dataGaName":418,"dataGaLocation":413},{"altText":415,"config":445},{"src":421,"dataGaName":418,"dataGaLocation":413},"content:shared:en-us:main-navigation.yml","Main Navigation","shared/en-us/main-navigation.yml","shared/en-us/main-navigation",{"_path":451,"_dir":22,"_draft":6,"_partial":6,"_locale":7,"title":452,"button":453,"image":458,"config":462,"_id":464,"_type":14,"_source":16,"_file":465,"_stem":466,"_extension":19},"/shared/en-us/banner","is now in public beta!",{"text":454,"config":455},"Try the 
Beta",{"href":456,"dataGaName":457,"dataGaLocation":28},"/gitlab-duo/agent-platform/","duo banner",{"altText":459,"config":460},"GitLab Duo Agent Platform",{"src":461},"https://res.cloudinary.com/about-gitlab-com/image/upload/v1753720689/somrf9zaunk0xlt7ne4x.svg",{"layout":463},"release","content:shared:en-us:banner.yml","shared/en-us/banner.yml","shared/en-us/banner",{"_path":468,"_dir":22,"_draft":6,"_partial":6,"_locale":7,"data":469,"_id":672,"_type":14,"title":673,"_source":16,"_file":674,"_stem":675,"_extension":19},"/shared/en-us/main-footer",{"text":470,"source":471,"edit":477,"contribute":482,"config":487,"items":492,"minimal":664},"Git is a trademark of Software Freedom Conservancy and our use of 'GitLab' is under license",{"text":472,"config":473},"View page source",{"href":474,"dataGaName":475,"dataGaLocation":476},"https://gitlab.com/gitlab-com/marketing/digital-experience/about-gitlab-com/","page source","footer",{"text":478,"config":479},"Edit this page",{"href":480,"dataGaName":481,"dataGaLocation":476},"https://gitlab.com/gitlab-com/marketing/digital-experience/about-gitlab-com/-/blob/main/content/","web ide",{"text":483,"config":484},"Please contribute",{"href":485,"dataGaName":486,"dataGaLocation":476},"https://gitlab.com/gitlab-com/marketing/digital-experience/about-gitlab-com/-/blob/main/CONTRIBUTING.md/","please contribute",{"twitter":488,"facebook":489,"youtube":490,"linkedin":491},"https://twitter.com/gitlab","https://www.facebook.com/gitlab","https://www.youtube.com/channel/UCnMGQ8QHMAnVIsI3xJrihhg","https://www.linkedin.com/company/gitlab-com",[493,515,571,600,634],{"title":46,"links":494,"subMenu":498},[495],{"text":9,"config":496},{"href":55,"dataGaName":497,"dataGaLocation":476},"devsecops platform",[499],{"title":188,"links":500},[501,505,510],{"text":502,"config":503},"View plans",{"href":190,"dataGaName":504,"dataGaLocation":476},"view plans",{"text":506,"config":507},"Why 
Premium?",{"href":508,"dataGaName":509,"dataGaLocation":476},"/pricing/premium/","why premium",{"text":511,"config":512},"Why Ultimate?",{"href":513,"dataGaName":514,"dataGaLocation":476},"/pricing/ultimate/","why ultimate",{"title":516,"links":517},"Solutions",[518,523,525,527,532,537,541,544,548,553,555,558,561,566],{"text":519,"config":520},"Digital transformation",{"href":521,"dataGaName":522,"dataGaLocation":476},"/topics/digital-transformation/","digital transformation",{"text":134,"config":524},{"href":136,"dataGaName":134,"dataGaLocation":476},{"text":123,"config":526},{"href":105,"dataGaName":106,"dataGaLocation":476},{"text":528,"config":529},"Agile development",{"href":530,"dataGaName":531,"dataGaLocation":476},"/solutions/agile-delivery/","agile delivery",{"text":533,"config":534},"Cloud transformation",{"href":535,"dataGaName":536,"dataGaLocation":476},"/topics/cloud-native/","cloud transformation",{"text":538,"config":539},"SCM",{"href":119,"dataGaName":540,"dataGaLocation":476},"source code management",{"text":109,"config":542},{"href":111,"dataGaName":543,"dataGaLocation":476},"continuous integration & delivery",{"text":545,"config":546},"Value stream management",{"href":163,"dataGaName":547,"dataGaLocation":476},"value stream management",{"text":549,"config":550},"GitOps",{"href":551,"dataGaName":552,"dataGaLocation":476},"/solutions/gitops/","gitops",{"text":173,"config":554},{"href":175,"dataGaName":176,"dataGaLocation":476},{"text":556,"config":557},"Small business",{"href":180,"dataGaName":181,"dataGaLocation":476},{"text":559,"config":560},"Public sector",{"href":185,"dataGaName":186,"dataGaLocation":476},{"text":562,"config":563},"Education",{"href":564,"dataGaName":565,"dataGaLocation":476},"/solutions/education/","education",{"text":567,"config":568},"Financial services",{"href":569,"dataGaName":570,"dataGaLocation":476},"/solutions/finance/","financial 
services",{"title":193,"links":572},[573,575,577,579,582,584,586,588,590,592,594,596,598],{"text":205,"config":574},{"href":207,"dataGaName":208,"dataGaLocation":476},{"text":210,"config":576},{"href":212,"dataGaName":213,"dataGaLocation":476},{"text":215,"config":578},{"href":217,"dataGaName":218,"dataGaLocation":476},{"text":220,"config":580},{"href":222,"dataGaName":581,"dataGaLocation":476},"docs",{"text":243,"config":583},{"href":245,"dataGaName":246,"dataGaLocation":476},{"text":238,"config":585},{"href":240,"dataGaName":241,"dataGaLocation":476},{"text":248,"config":587},{"href":250,"dataGaName":251,"dataGaLocation":476},{"text":261,"config":589},{"href":263,"dataGaName":264,"dataGaLocation":476},{"text":253,"config":591},{"href":255,"dataGaName":256,"dataGaLocation":476},{"text":266,"config":593},{"href":268,"dataGaName":269,"dataGaLocation":476},{"text":271,"config":595},{"href":273,"dataGaName":274,"dataGaLocation":476},{"text":276,"config":597},{"href":278,"dataGaName":279,"dataGaLocation":476},{"text":281,"config":599},{"href":283,"dataGaName":284,"dataGaLocation":476},{"title":299,"links":601},[602,604,606,608,610,612,614,618,623,625,627,629],{"text":306,"config":603},{"href":308,"dataGaName":301,"dataGaLocation":476},{"text":311,"config":605},{"href":313,"dataGaName":314,"dataGaLocation":476},{"text":319,"config":607},{"href":321,"dataGaName":322,"dataGaLocation":476},{"text":324,"config":609},{"href":326,"dataGaName":327,"dataGaLocation":476},{"text":329,"config":611},{"href":331,"dataGaName":332,"dataGaLocation":476},{"text":334,"config":613},{"href":336,"dataGaName":337,"dataGaLocation":476},{"text":615,"config":616},"Sustainability",{"href":617,"dataGaName":615,"dataGaLocation":476},"/sustainability/",{"text":619,"config":620},"Diversity, inclusion and belonging (DIB)",{"href":621,"dataGaName":622,"dataGaLocation":476},"/diversity-inclusion-belonging/","Diversity, inclusion and 
belonging",{"text":339,"config":624},{"href":341,"dataGaName":342,"dataGaLocation":476},{"text":349,"config":626},{"href":351,"dataGaName":352,"dataGaLocation":476},{"text":354,"config":628},{"href":356,"dataGaName":357,"dataGaLocation":476},{"text":630,"config":631},"Modern Slavery Transparency Statement",{"href":632,"dataGaName":633,"dataGaLocation":476},"https://handbook.gitlab.com/handbook/legal/modern-slavery-act-transparency-statement/","modern slavery transparency statement",{"title":635,"links":636},"Contact Us",[637,640,642,644,649,654,659],{"text":638,"config":639},"Contact an expert",{"href":37,"dataGaName":38,"dataGaLocation":476},{"text":368,"config":641},{"href":370,"dataGaName":371,"dataGaLocation":476},{"text":373,"config":643},{"href":375,"dataGaName":376,"dataGaLocation":476},{"text":645,"config":646},"Status",{"href":647,"dataGaName":648,"dataGaLocation":476},"https://status.gitlab.com/","status",{"text":650,"config":651},"Terms of use",{"href":652,"dataGaName":653,"dataGaLocation":476},"/terms/","terms of use",{"text":655,"config":656},"Privacy statement",{"href":657,"dataGaName":658,"dataGaLocation":476},"/privacy/","privacy statement",{"text":660,"config":661},"Cookie preferences",{"dataGaName":662,"dataGaLocation":476,"id":663,"isOneTrustButton":91},"cookie preferences","ot-sdk-btn",{"items":665},[666,668,670],{"text":650,"config":667},{"href":652,"dataGaName":653,"dataGaLocation":476},{"text":655,"config":669},{"href":657,"dataGaName":658,"dataGaLocation":476},{"text":660,"config":671},{"dataGaName":662,"dataGaLocation":476,"id":663,"isOneTrustButton":91},"content:shared:en-us:main-footer.yml","Main 
Footer","shared/en-us/main-footer.yml","shared/en-us/main-footer",{"allPosts":677,"featuredPost":4373,"totalPagesCount":4392,"initialPosts":4393},[678,703,727,745,767,788,812,833,854,874,895,915,937,958,979,998,1017,1040,1061,1082,1102,1121,1141,1162,1184,1205,1224,1244,1264,1285,1305,1326,1346,1367,1389,1409,1430,1449,1469,1488,1508,1527,1550,1570,1589,1610,1629,1650,1669,1689,1710,1730,1749,1767,1785,1804,1823,1841,1859,1879,1897,1915,1935,1954,1975,1997,2017,2036,2055,2074,2093,2112,2132,2151,2169,2187,2207,2225,2246,2267,2288,2306,2326,2345,2363,2381,2402,2422,2441,2461,2480,2500,2520,2540,2558,2579,2598,2617,2636,2655,2675,2694,2713,2736,2756,2774,2793,2813,2832,2853,2873,2894,2915,2934,2954,2972,2991,3010,3029,3049,3067,3087,3106,3125,3145,3164,3184,3205,3224,3244,3263,3282,3303,3320,3342,3362,3381,3400,3420,3439,3457,3475,3494,3512,3531,3550,3569,3589,3608,3627,3647,3664,3682,3700,3718,3737,3757,3777,3797,3815,3833,3850,3869,3888,3907,3926,3946,3963,3981,4000,4018,4036,4057,4076,4094,4113,4132,4152,4172,4190,4209,4228,4247,4261,4275,4288,4302,4316,4330,4344,4360],{"_path":679,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":680,"content":688,"config":696,"_id":699,"_type":14,"title":700,"_source":16,"_file":701,"_stem":702,"_extension":19},"/en-us/blog/3-signs-your-team-is-ready-to-uplevel-security-controls-in-gitlab",{"title":681,"description":682,"ogTitle":681,"ogDescription":682,"noIndex":6,"ogImage":683,"ogUrl":684,"ogSiteName":685,"ogType":686,"canonicalUrls":684,"schema":687},"3 signs your team is ready to uplevel security controls in GitLab","Learn when to upgrade your GitLab security practices, from permission management to compliance adherence. 
Discover key features in GitLab Premium that scale with your team.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749664299/Blog/Hero%20Images/AdobeStock_887599633.jpg","https://about.gitlab.com/blog/3-signs-your-team-is-ready-to-uplevel-security-controls-in-gitlab","https://about.gitlab.com","article","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"3 signs your team is ready to uplevel security controls in GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Julie Griffin\"}],\n        \"datePublished\": \"2024-12-18\",\n      }",{"title":681,"description":682,"authors":689,"heroImage":683,"date":691,"body":692,"category":693,"tags":694},[690],"Julie Griffin","2024-12-18","Most teams start with basic security practices, such as branch protection and simple access controls. But, there's often a moment when teams realize they need more. It could be when they land their first enterprise client, when they start handling sensitive data, or when they experience their first security incident.\n\nIf you’re unsure whether you’re ready to upgrade your security, here are a few signs you’ve outgrown your security needs:\n\n* You spend more time managing permissions than writing code.  \n* Security reviews create development bottlenecks.  \n* You can't definitively say who changed what and when.  \n* You're unsure if security policies are consistently followed.\n\nDo any of these signs resonate with you? Let's explore how teams typically mature their security practices as they grow. \n\n## 1. Your organization requires advanced access controls.\n\nManual permission management can be tedious and prone to errors. While it’s manageable for a team of three, it becomes much more complex as your team grows to 15, 30, or 100 developers. \n\nThe disadvantages of an intricate permission system are two-fold:\n\n1. 
It becomes more likely that accidental or unauthorized changes are made to critical parts of the codebase.  \n2. Managing complex permissions takes time that could be spent developing valuable software for the business. \n\n### Features that automate permission management\n\nScaling teams need features that automate permission management. GitLab Premium offers enterprise-grade Agile planning features that provide [organizational hierarchies](https://about.gitlab.com/blog/best-practices-to-set-up-organizational-hierarchies-that-scale/), enabling advanced permissions management at the group or sub-group level. \n\nThis, alongside features like [Protected Branches](https://docs.gitlab.com/ee/user/project/repository/branches/protected.html) and restricted push and merge access, save growing teams time while providing an additional layer of security. \n\n## 2. You need to build a robust review process.\n\nMany teams have senior developers review security-sensitive code. However, as your codebase expands, it becomes more challenging to ensure the right people are reviewing the right changes. This can lead to an elongated review process or the release of insecure code before it’s been reviewed by the right parties. \n\nWhen you notice security reviews becoming inconsistent or creating bottlenecks, it’s time to consider solutions that give you tighter control over your merge request pipelines. \n\n### Features that enhance the review process\n\nGitLab Premium helps teams mature beyond manual processes with capabilities like [Multiple Approvers](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/) and [push rules](https://docs.gitlab.com/ee/user/project/repository/push_rules.html). These features improve your code by ensuring it’s reviewed before it is merged, preventing errors from occurring late in the development process. It also requires higher levels of authorization and verification to those who push or commit to a git branch. \n\n## 3. 
You need to strengthen compliance adherence.\n\nWhen your team is small, you know who is working on what projects and when deployments will occur. But, as your team grows it becomes more challenging (if not impossible) to follow all code changes and activities. It’s also easy to lose sight of security policies and whether all team members are consistently following them.\n\nThese are signs that you need tools to help you track changes and ensure code quality meets regulatory requirements. \n\n### Features that improve compliance efforts\n\nWith GitLab Premium’s [Audit Events](https://docs.gitlab.com/ee/administration/audit_event_reports.html), you can track and review changes, such as who performed certain actions at what time within the repository. At the same time, [Code Quality Reports](https://docs.gitlab.com/ee/ci/testing/code_quality.html) can check for adherence to compliance standards. This can help teams more readily prove compliance while also quickly identifying and fixing problems within the code. \n\n## Scale your security efforts with GitLab Premium \n\nIf you’re experiencing security-related growing pains as your business scales, consider upleveling your security needs before it’s too late. Empower your team with features that prioritize security and compliance, and accelerate software delivery. 
\n\n> #### [Upgrade to GitLab Premium today!](https://about.gitlab.com/pricing/premium/why-upgrade/)","security",[693,9,695],"features",{"slug":697,"featured":91,"template":698},"3-signs-your-team-is-ready-to-uplevel-security-controls-in-gitlab","BlogPost","content:en-us:blog:3-signs-your-team-is-ready-to-uplevel-security-controls-in-gitlab.yml","3 Signs Your Team Is Ready To Uplevel Security Controls In Gitlab","en-us/blog/3-signs-your-team-is-ready-to-uplevel-security-controls-in-gitlab.yml","en-us/blog/3-signs-your-team-is-ready-to-uplevel-security-controls-in-gitlab",{"_path":704,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":705,"content":711,"config":721,"_id":723,"_type":14,"title":724,"_source":16,"_file":725,"_stem":726,"_extension":19},"/en-us/blog/4-ways-to-accelerate-embedded-development-with-gitlab",{"title":706,"description":707,"ogTitle":706,"ogDescription":707,"noIndex":6,"ogImage":708,"ogUrl":709,"ogSiteName":685,"ogType":686,"canonicalUrls":709,"schema":710},"4 ways to accelerate embedded development with GitLab","Learn how automated hardware testing, standard builds, collaborative workflows, and integrated compliance eliminate bottlenecks in firmware development.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659756/Blog/Hero%20Images/REFERENCE_-_display_preview_for_blog_images.png","https://about.gitlab.com/blog/4-ways-to-accelerate-embedded-development-with-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"4 ways to accelerate embedded development with GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Matt DeLaney\"},{\"@type\":\"Person\",\"name\":\"Darwin Sanoy\"}],\n        \"datePublished\": \"2025-06-05\",\n      }",{"title":706,"description":707,"authors":712,"heroImage":708,"date":715,"body":716,"category":717,"tags":718},[713,714],"Matt DeLaney","Darwin Sanoy","2025-06-05","Software in embedded systems is no 
longer just a part number — it's a critical differentiator. This shift has led to enormous complexity in the firmware running in our cars, airplanes, and industrial machines. The number of lines of code in the average car is expected to reach [650 million](https://www.statista.com/statistics/1370978/automotive-software-average-lines-of-codes-per-vehicle-globally/) by the end of 2025, up from 200 million just five years ago. In aerospace systems, the complexity of embedded software has nearly [doubled every four years](https://www.mckinsey.com/industries/aerospace-and-defense/our-insights/debugging-the-software-talent-gap-in-aerospace-and-defense) for the last several decades. \n\nTraditional embedded development approaches cannot effectively handle the software challenges of modern machines. This shortcoming slows engineers down, in part, by exacerbating challenges such as: \n\n* [Hardware testing bottlenecks](#challenge-1-hardware-testing-bottlenecks) \n* [Inconsistent build environments](#challenge-2-inconsistent-build-environments)\n* [Siloed development practices](#challenge-3-siloed-development-practices)\n* [Manual functional safety compliance processes](#challenge-4-manual-functional-safety-compliance-processes)\n\nEmbedded developers need a new approach to deal with the rapid increase in code. In this article, we’ll explain four ways you can use the GitLab AI-native DevSecOps platform to shorten feedback loops, work collaboratively and iteratively, and streamline compliance.\n\n## Challenge 1: Hardware testing bottlenecks\n\nUnlike enterprise software that can run on virtually any cloud server, embedded automotive software must be tested on specialized hardware that precisely matches production environments. Traditional hardware-in-the-loop (HIL) testing processes often follow this pattern:\n\n1. Developers write code for an embedded system (e.g., an electronic control unit)  \n2. 
They request access to limited, expensive hardware test benches (costing $500,000-$10M each)  \n3. They wait days or weeks for their scheduled access window  \n4. They manually deploy and test their code on physical hardware at their desks  \n5. They document results, pass the hardware to the next developer, and go to the back of the hardware testing queue\n\nThis process is extremely inefficient. Embedded developers may finish writing their code today and wait weeks to test it on a hardware target. By then, they've moved on to other tasks. This context switching drains productivity. Not only that, developers may wait weeks to learn they had a simple math error in their code. \n\n### Solution: Automated hardware allocation and continuous integration\n\nYou can streamline hardware testing through automation using the [GitLab On-Premises Device Cloud](https://gitlab.com/guided-explorations/embedded/ci-components/device-cloud), a CI/CD component. This lets you automate the orchestration of scarce hardware resources, turning a manual, time-intensive process into a streamlined, continuous workflow.\n\nThe On-Premises Device Cloud:\n\n1. Creates pools of shared hardware resources  \n2. Automatically — and exclusively — allocates hardware to a developer’s hardware testing pipeline tasks based on availability  \n3. Deploys and executes tests without manual intervention  \n4. Collects and reports results through integrated pipelines  \n5. 
Automatically deallocates hardware back into the “available” pool\n\nAfter submitting code, you’ll receive results in hours instead of days, often without ever physically touching the test hardware.\n\nWatch this video for an introduction to the GitLab On-Premises Device Cloud CI/CD Component to orchestrate the remote allocation of shared hardware for HIL:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/ltr2CIM9Zag?si=NOij3t1YYz4zKajC\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\nYou can also adopt multi-pronged testing strategies that balance speed and quality. Bring the following embedded test patterns and environments into automated GitLab CI pipelines:\n\n* **Software-in-the-loop (SIL):** Testing on virtual hardware simulators for quicker initial feedback  \n* **Processor-in-the-loop (PIL):** Testing on representative processor hardware for faster feedback at a lower cost  \n* **Hardware-in-the-loop (HIL):** Testing on full production-equivalent hardware and test benches for late-stage verification\n\nBy automating the orchestration of these tests within CI pipelines, you’ll be able to identify issues earlier, iterate faster, and accelerate time to market.\n\n## Challenge 2: Inconsistent build environments\n\nAnother significant challenge in embedded development is build environment inconsistency. Embedded developers often manually execute builds on their local machines with varying configurations, compiler versions, and dependencies. 
Then they’ll paste the binaries from their local build to a shared codebase.\n\nThis approach creates several problems:\n\n* **Inconsistent outputs:** Builds for the same source code produce different results on different machines  \n* **\"Works on my machine\" syndrome:** Code that builds locally fails in shared environments  \n* **Poor traceability:** Limited audit trail of who built what and when  \n* **Knowledge silos:** Build expertise becomes concentrated in a few individuals\n\nThis approach can lead to errors, bottlenecks, and costly delays. \n\n### Solution: Standardized build automation\n\nYou can address these challenges by implementing standardized build automation within CI/CD pipelines in GitLab. This approach creates consistent, repeatable, container-based build environments that eliminate machine-specific variations. Through the use of special Embedded Gateway Runner provisioning scripts, containers can interface with hardware for flashing and port monitoring for automated testing.\n\nKey elements of this solution include:\n\n* **Lifecycle managed environments:** Define complex embedded simulation environments as code; automatically deploy environments for testing and destroy them afterward  \n* **Containerization:** Use Docker containers to ensure identical build environments  \n* **Automated dependency management:** Control and version all dependencies  \n* **Central build execution:** Run builds on shared infrastructure rather than local machines\n\n> Follow this tutorial to learn [how to automate embedded software builds within a GitLab CI pipeline](https://gitlab.com/guided-explorations/embedded/workshops/embedded-devops-workshop-refactoring-to-ci/-/blob/main/TUTORIAL2.md).\n\nBy standardizing and automating the build process, you can ensure that every build follows the same steps with the same dependencies, producing consistent outputs regardless of who initiated it. 
This not only improves quality but also democratizes the build process, enabling more team members to participate without specialized knowledge.\n\n## Challenge 3: Siloed development practices\n\nEnterprise development teams have widely adopted collaborative practices such as DevOps, underpinned by shared source code management (SCM) and continuous integration/continuous delivery (CI/CD) systems. Embedded developers, on the other hand, have historically worked alone at their desks. There are valid technical reasons for this. \n\nFor example, consider hardware virtualization, which is a key enabler of DevOps automation. The industry has been slower to virtualize the massive range of specialized processors and boards used in embedded systems. This is due in large part to the difficulties of virtualizing production real-time systems and the associated lack of economic incentives. Compare that to cloud virtualization which has been commoditized and benefited enterprise SaaS development for over a decade.\n\nMany providers are now embracing virtualization-first for the sake of speeding up embedded development. If teams fail to adopt virtual testing options, however, their silos will remain and negatively impact the business through: \n\n* **Knowledge fragmentation**: Critical insights remain scattered across individuals and teams  \n* **Redundant development**: Multiple teams solve identical problems, creating inconsistencies  \n* **Late-stage discovery during big-bang integrations**: Problems are found late in the process when multiple developers integrate their code at once, when errors are more costly to fix  \n* **Stifled innovation**: Solutions from one domain rarely influence others, hampering the development of new product ideas\n\n### Solution: Collaborative engineering through a unified platform\n\nAn important step in breaking down these silos is to standardize embedded development around GitLab’s unified DevSecOps platform. 
In this regard, GitLab is aligned with the shift of embedded systems toward more consolidated, shared platforms on embedded devices. GitLab enables:\n\n* **Shared visibility:** Make all code, Issues, and documentation visible across teams  \n* **Collaborative workflows:** Enable peer review and knowledge sharing through merge requests  \n* **Centralized knowledge:** Maintain a single source of truth for all development artifacts  \n* **Asynchronous collaboration:** Allow teams to work together across different locations and time zones\n\nHuman-AI agent collaboration is a fundamental ingredient to fueling the customer-facing innovations that digital natives and established embedded brands desire. GitLab enables human-AI collaboration as well. By creating transparency across the development lifecycle, GitLab changes embedded development from an isolated activity to a collaborative practice. Engineers can see each other's work in progress, learn from collective experiences, and build upon shared solutions.\n\nWatch this presentation from Embedded World Germany 2025, which explains the power of embedded developers collaborating and sharing “work in progress”. The demo portion from 24:42 to 36:51 shows how to integrate HIL into a GitLab CI pipeline and enable collaborative development.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/F_rlOyq0hzc?si=eF4alDY6HK98uZPj\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\nPerhaps most importantly, by achieving greater collaboration through DevSecOps, teams can unlock embedded systems innovations that would otherwise remain hidden. Indeed, collaboration fuels innovation. 
[One study](https://www.sciencedirect.com/science/article/abs/pii/S0749597800928887), for example, found that group brainstorming, when properly structured, can lead to more innovative and creative outcomes than individuals working alone. Collaborative development is crucial in the race to develop software-defined products. \n\n## Challenge 4: Manual functional safety compliance processes\n\nEmbedded systems in the automotive and aerospace industries must comply with rigorous functional safety standards, including ISO 26262, MISRA C/C++, DO-178C, and DO-254. Traditional compliance approaches involve manual reviews, extensive documentation, and separate verification activities that occur late in the development cycle. This often creates security review bottlenecks. When specialized embedded security and code quality scanners detect vulnerabilities in a developer’s code, the scan issue gets added to a pile of other issues that haven’t been resolved. Developers can’t integrate their code, and security personnel need to wade through a backlog of code violations. This creates delays and makes compliance more difficult. 
\n\nSome of the challenges can best be summed up as: \n\n* **Late-stage compliance issues**: Problems discovered after development is complete  \n* **Documentation burden**: Extensive manual effort to create and maintain compliance evidence  \n* **Process bottlenecks**: Serial compliance activities that block development progress  \n* **Expertise dependence**: Reliance on limited specialists for compliance activities\n\nAs a result, teams often need to choose between velocity and compliance — a precarious trade-off in safety-critical systems.\n\n### Solution: Automated functional safety compliance workflow building blocks\n\nRather than treating security and compliance as post-development verification activities, you can codify compliance requirements and enforce them automatically through [customizable frameworks in GitLab](https://about.gitlab.com/blog/introducing-custom-compliance-frameworks-in-gitlab/). To do this for functional safety standards, in particular, you can integrate GitLab with specialized embedded tools, which provide the depth of firmware scanning required by functional safety standards. Meanwhile, GitLab provides automated compliance checks, full audit trails, and merge request gating — all features needed to support a robust continuous compliance program. \n\nThis integrated approach includes:\n\n* **Compliance-as-code:** Define compliance requirements as automated checks  \n* **Integrated specialized tools:** Connect tools like CodeSonar into the DevSecOps platform for automotive-specific compliance  \n* **Continuous compliance verification:** Verify requirements throughout development  \n* **Automated evidence collection:** Gather compliance artifacts as a by-product of development\n\nWatch this video to learn how to use Custom Compliance Frameworks in GitLab to create your own compliance policies. 
You can create compliance policies related to any standard (e.g., ISO 26262) and automatically enforce those policies in GitLab.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/S-FQjzSyVJw?si=0UdtGNuugLPG0SLL\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\nBy shifting compliance left and embedding it within normal development workflows, you can maintain safety standards without sacrificing velocity. Automated checks catch issues early when they're easier and less expensive to fix, while continuous evidence collection reduces the documentation burden.\n\n## Realizing the power of embedded DevOps\n\nEmbedded development is changing fast. Teams that remain stuck in manual processes and isolated workflows will find themselves increasingly left behind, while those that embrace automated, collaborative practices will define the future of software-defined smart systems.\n\nExplore our [Embedded DevOps Workshop](https://gitlab.com/guided-explorations/embedded/workshops/embedded-devops-workshop-refactoring-to-ci) to start automating embedded development workflows with GitLab, or [watch this presentation from GitLab's Field Chief Cloud Architect](https://content.gitlab.com/viewer/0a35252831bd130f879b0725738f70ed) to learn how leading organizations are bringing hardware-in-the-loop testing into continuous integration workflows to accelerate embedded development.\n\n## Learn more\n\n- [Why GitLab Premium with Duo for embedded systems development?](https://content.gitlab.com/viewer/438451cba726dd017da7b95fd0fb1b59)\n- [Why GitLab Ultimate with Duo for embedded systems development?](https://content.gitlab.com/viewer/87f5104c26720e2c0d73a6b377522a44)\n- [More embedded development systems presentations from GitLab](https://content.gitlab.com/viewer/e59c40099d5e3c8f9307afb27c4a923f)","product",[9,719,695,720],"tutorial","embedded 
DevOps",{"slug":722,"featured":6,"template":698},"4-ways-to-accelerate-embedded-development-with-gitlab","content:en-us:blog:4-ways-to-accelerate-embedded-development-with-gitlab.yml","4 Ways To Accelerate Embedded Development With Gitlab","en-us/blog/4-ways-to-accelerate-embedded-development-with-gitlab.yml","en-us/blog/4-ways-to-accelerate-embedded-development-with-gitlab",{"_path":728,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":729,"content":735,"config":739,"_id":741,"_type":14,"title":742,"_source":16,"_file":743,"_stem":744,"_extension":19},"/en-us/blog/5-gitlab-premium-features-to-help-your-team-scale",{"title":730,"description":731,"ogTitle":730,"ogDescription":731,"noIndex":6,"ogImage":732,"ogUrl":733,"ogSiteName":685,"ogType":686,"canonicalUrls":733,"schema":734},"5 GitLab Premium features to help your team scale","Explore how GitLab Premium boosts team collaboration and productivity, enabling organizations to scale with streamlined workflows and advanced capabilities.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749665151/Blog/Hero%20Images/blog-image-template-1800x945__27_.png","https://about.gitlab.com/blog/5-gitlab-premium-features-to-help-your-team-scale","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"5 GitLab Premium features to help your team scale\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Julie Griffin\"}],\n        \"datePublished\": \"2024-12-18\",\n      }",{"title":730,"description":731,"authors":736,"heroImage":732,"date":691,"body":737,"category":717,"tags":738},[690],"As development teams grow, what once worked for a small team often becomes a bottleneck. Code standards become inconsistent, operational silos develop, and technical debt accumulates faster. What was a well-oiled machine is now dysfunctional as more team members, projects, and tools are added on. 
\n\nMany teams experience these challenges as they grow, but how you handle and address these growing pains can save you time, energy, and money in the long run. In this article, we’ll explore the common pitfalls growing teams face and how successful organizations address them. \n\n## 1. Consistent code quality\n\nOne of the challenges growing teams face is [maintaining consistent code quality](https://about.gitlab.com/blog/transform-code-quality-and-compliance-with-automated-processes/) as more developers contribute to the codebase. Quality issues that were once caught quickly now take longer to identify and fix.\n\nSuccessful teams address these challenges through automated code analysis throughout their development workflow. Instead of relying solely on manual reviews, they implement systems to identify potential issues and enforce consistent standards before code even reaches human reviewers. This approach helps detect complexity issues early and flags potential security vulnerabilities, allowing reviewers to focus on more strategic aspects of code review.\n\n### Features that maintain consistent code quality\n\n* Start by automating code analysis in your workflow. With GitLab Premium, you can set up [Code Quality Reports](https://docs.gitlab.com/ee/ci/testing/code_quality.html) in your merge requests. This helps catch issues early by analyzing code complexity and quality before review begins. For example, when a developer submits changes that might increase technical debt, the report will flag these issues automatically.  \n* Next, establish automated quality standards. Configure Quality Gates to define what \"good code\" means for your team. This could include test coverage requirements, complexity limits, or specific coding patterns. When code doesn't meet these standards, merges are automatically blocked until issues are addressed.  \n* Finally, prevent issues before they even reach review. 
[Push Rules](https://docs.gitlab.com/ee/user/project/repository/push_rules.html) let you enforce standards right at commit time. You might start with simple rules like requiring certain commit message formats, then gradually add more sophisticated checks as your team adapts.\n\n## 2. Improve collaboration and productivity\n\nThe priorities for startups are often budget and speed, but as businesses grow, tracking DevSecOps workflows across a patchwork of tools can actually deter productivity.\n\nDisparate tools cause developers to context switch between platforms, decreasing focus time and development speed. Toolchain sprawl also limits visibility among teams, creating operational silos that lead to miscommunication.\n\nTo address these challenges, teams often turn to Agile solutions to help with project management, align timelines, and improve cross-team collaboration. When combined with a DevSecOps environment, [Agile](https://about.gitlab.com/topics/agile-devsecops/) creates a powerful system for software development that marries the iterative Agile approach with a security-first mindset. \n\n### Features that improve collaboration and productivity\n\n* With GitLab Premium, teams can access enterprise-grade Agile tools within their DevSecOps platforms. You can start by creating [groups and projects](https://docs.gitlab.com/ee/user/group/), assigning team members roles, and determining their level of permission.   \n* [Milestones](https://docs.gitlab.com/ee/user/project/milestones/) and [epics](https://docs.gitlab.com/ee/user/group/epics/index.html) help teams plan large-scale initiatives across multiple projects to track dependencies, progress, and align on deliverables. This gives everyone clear visibility into the process.  \n* Then, dive deeper into each task with [issues](https://docs.gitlab.com/ee/user/project/issue_board.html). 
With customizable workflows and multi-assignee capabilities, teams can visualize project progress, dynamically adjust priorities, and collaborate on issue resolution.\n\n## 3. Increase deployment velocity\n\nIn theory, teams should be more productive as they scale. However, if tools aren’t updated to accommodate a growing team, the CI/CD pipeline can feel clunky and inefficient. \n\nTeams turn to tools that help them automate and optimize the [CI/CD pipeline](https://about.gitlab.com/topics/ci-cd/cicd-pipeline/). By automating components like code reviews, merge trains, and permissions, teams can streamline the CI/CD pipeline and improve deployment speed. \n\n### Features that increase deployment velocity\n\nGitLab Premium offers advanced features that help you build, maintain, deploy, and monitor complex pipelines. Increase the speed of deployment through the CI/CD pipeline with more control over code reviews and merge request processes.\n\n* You can automate the merging of multiple changes in a controlled sequence with Merge Trains. This reduces integration issues and improves deployment efficiency.  \n* Gain visibility into whether your jobs passed or failed with Multi-Project Pipeline Graphs. Access all related jobs for a single commit and the net result of each stage of your pipeline to quickly see what failed and fix it.  \n* Team leaders can access comprehensive insights and make data-informed decisions with [Code Review Analytics](https://docs.gitlab.com/ee/user/analytics/code_review_analytics.html) that provide detailed metrics and merge request analytics. This helps teams identify bottlenecks, optimize review cycles, and establish data-driven process improvements. \n\n## 4. Enhance security and compliance controls\n\nWithout rigorous governance policies, inefficient and insecure code may be released. With smaller companies, security reviews are often manual and the reviews often take a backseat to speed. 
This can lead to teams releasing incorrect or unsafe code to production causing costly delays. \n\nTo [evolve their security practices](https://about.gitlab.com/blog/3-signs-your-team-is-ready-to-uplevel-security-controls-in-gitlab/), teams turn to stricter access controls, a more refined and delineated review process, as well as features that enable teams to review and track changes. \n\n### Features that enhance security and compliance controls\n\n* With stricter access controls, such as [Protected Branches and Protected Environments](https://docs.gitlab.com/ee/user/project/repository/branches/protected.html), you can restrict push and merge access, securing those areas from unwanted changes by unauthorized users.  \n* To strengthen security review processes, implement [Multiple Approvers in Merge Requests](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/). This requires team members to review and approve code changes before they’re pushed through.  \n* Review who performed a certain action within the repository and at what time with [Audit Events](https://docs.gitlab.com/ee/administration/audit_event_reports.html). By tracking changes, you’re able to stay on top of compliance requirements. \n\n## 5. Avoid downtime and delays\n\nWithout support, teams are left to troubleshoot issues themselves. This can lead to major delays or periods of downtime where the company is unable to deliver value. As companies grow, this downtime becomes more and more detrimental to the business. \n\nIt’s important to evaluate what your company’s threshold is for downtime. When the value of the downtime outweighs the cost of support, it’s time to scale your DevSecOps platform to meet those needs. \n\n### Support services to avoid downtime and delays\n\nWith GitLab Premium, customers of both SaaS and self-managed instances have access to [Priority Support](https://about.gitlab.com/support/#priority-support). 
GitLab customer support offers Tiered Support response times, ranging from emergency to low-impact services, and can help you resolve issues quickly, minimizing downtime and disruption to your development cycle.\n\nPlus, for self-managed customers moving to Premium, GitLab offers support for any issues that occur after implementation and upgrade assistance to provide a seamless transition. \n\n## Build today, scale for tomorrow with GitLab Premium\n\nInstead of struggling with the challenges that growing teams face, scale your DevSecOps platform with GitLab Premium. \n\nGitLab Premium provides teams with the project management, pipeline tools, security, and support needed to work efficiently and effectively across the software development lifecycle. \n\n> #### Learn more about [why you should upgrade to GitLab Premium](https://about.gitlab.com/pricing/premium/why-upgrade/).",[9,695],{"slug":740,"featured":6,"template":698},"5-gitlab-premium-features-to-help-your-team-scale","content:en-us:blog:5-gitlab-premium-features-to-help-your-team-scale.yml","5 Gitlab Premium Features To Help Your Team Scale","en-us/blog/5-gitlab-premium-features-to-help-your-team-scale.yml","en-us/blog/5-gitlab-premium-features-to-help-your-team-scale",{"_path":746,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":747,"content":753,"config":761,"_id":763,"_type":14,"title":764,"_source":16,"_file":765,"_stem":766,"_extension":19},"/en-us/blog/a-guide-to-the-breaking-changes-in-gitlab-18-0",{"title":748,"description":749,"ogTitle":748,"ogDescription":749,"noIndex":6,"ogImage":750,"ogUrl":751,"ogSiteName":685,"ogType":686,"canonicalUrls":751,"schema":752},"A guide to the breaking changes in GitLab 18.0","Prepare now for the removals in our upcoming major release. 
Assess your impact and then review the mitigation steps provided in the documentation to ensure a smooth transition to GitLab 18.0.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659437/Blog/Hero%20Images/AdobeStock_398929148.jpg","https://about.gitlab.com/blog/a-guide-to-the-breaking-changes-in-gitlab-18-0","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"A guide to the breaking changes in GitLab 18.0\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Martin Brümmer\"},{\"@type\":\"Person\",\"name\":\"Fabian Zimmer\"},{\"@type\":\"Person\",\"name\":\"Sam Wiskow\"}],\n        \"datePublished\": \"2025-04-18\",\n      }",{"title":748,"description":749,"authors":754,"heroImage":750,"date":758,"body":759,"category":717,"tags":760},[755,756,757],"Martin Brümmer","Fabian Zimmer","Sam Wiskow","2025-04-18","GitLab 18.0, our next major release, will be packed with new features that push the boundaries of DevSecOps innovation. At the same time, we’ll be removing some deprecated features from GitLab. Here is what you need to know about these breaking changes and how you can mitigate their impact.\n\n## Deployment windows\n\n### GitLab.com  \n\nBreaking changes for GitLab.com will be limited to these three windows. \n\n- April 21-23, 2025  \n- April 28-30, 2025  \n- May 5-7, 2025\n\nMany other changes will continue to roll out throughout the month. You can learn more about the high-impact changes occurring within each of these windows in this [breaking changes documentation](https://docs.gitlab.com/update/breaking_windows/).\n\n***Note:** Breaking changes may fall slightly outside of these windows in exceptional circumstances.*\n\n### GitLab Self-Managed\n\nGitLab 18.0 will be available starting on May 15. 
You can learn more about the release schedule [here](https://about.gitlab.com/releases/).\n\n### GitLab Dedicated\n\nThe upgrade to GitLab 18.0 will take place during your maintenance window from June 24-29, 2025. You can learn more and find your assigned maintenance window [here](https://docs.gitlab.com/administration/dedicated/maintenance/#release-rollout-schedule).\n\nWe’ve also developed custom tooling and resources to help you assess the impact of these changes on your environment and plan any necessary actions ahead of the 18.0 upgrade. You can find [information about these mitigation tools and resources](#tools-and-resources-to-manage-your-impact).\n\nVisit the [Deprecations page](https://docs.gitlab.com/ee/update/deprecations?removal_milestone=18.0) to see a full list of items scheduled for removal in 18.0. Read on to learn what’s coming and how to prepare for this year’s release based on your specific deployment.\n\n## Breaking changes\n\n### High impact\n\n**1. CI/CD job token - “Limit access from your project” setting removal**\n\nGitLab.com | Self-Managed | Dedicated\n\nIn GitLab 14.4, we introduced a setting to **[limit access *from* your project's CI/CD job tokens (CI_JOB_TOKEN)](https://docs.gitlab.com/ci/jobs/ci_job_token/#limit-your-projects-job-token-access)** for added security. This setting was called **Limit CI_JOB_TOKEN access**. In GitLab 16.3, we renamed this setting **Limit access *from* this project** for clarity.\n\nIn GitLab 15.9, we introduced an alternative setting called **[Authorized groups and projects](https://docs.gitlab.com/ci/jobs/ci_job_token/#add-a-group-or-project-to-the-job-token-allowlist)**. This setting controls job token access to your project by using an allowlist. This new setting is a significant improvement over the original. The first iteration was deprecated in GitLab 16.0 and scheduled for removal in GitLab 18.0.\n\nThe **Limit access *from* this project** setting is disabled by default for all new projects. 
In GitLab 16.0 and later, you cannot re-enable this setting after it is disabled in any project. Instead, use the **Authorized groups and projects** setting to control job token access to your projects.\n\n- [Deprecation notice](https://docs.gitlab.com/update/deprecations/#cicd-job-token---limit-access-from-your-project-setting-removal)\n- [GitLab Detective check available](https://gitlab.com/gitlab-com/support/toolbox/gitlab-detective/-/blob/main/README.md)\n\n**2. CI/CD job token - Authorized groups and projects allowlist enforcement**\n\nGitLab.com | Self-Managed | Dedicated\n\nWith the **[Authorized groups and projects setting](https://docs.gitlab.com/ee/ci/jobs/ci_job_token.html#add-a-group-or-project-to-the-job-token-allowlist)** introduced in GitLab 15.9 (renamed from **Limit access to this project** in GitLab 16.3), you can manage CI/CD job token access to your project. When set to **Only this project and any groups and projects in the allowlist**, only groups or projects added to the allowlist can use job tokens to access your project.\n\n* **Prior to GitLab 15.9**, the allowlist was disabled by default ([**All groups and projects**](https://docs.gitlab.com/ee/ci/jobs/ci_job_token.html#allow-any-project-to-access-your-project) access setting selected), allowing job token access from any project.   \n* **Since GitLab 17.6**, administrators for GitLab Self-Managed and Dedicated instances have had the option to [**enforce a more secure setting for all projects**](https://docs.gitlab.com/ee/administration/settings/continuous_integration.html#job-token-permissions), which prevents project maintainers from selecting **All groups and projects**. This change ensures a higher level of security between projects.   \n* In GitLab 18.0, this setting will be enabled by default. On GitLab.com, we will automatically populate your projects’ allowlists based on your project authentication logs.   
\n* To prepare for this change on **GitLab.com**, project maintainers using the job token for cross-project authentication should populate their project's **Authorized groups and projects** allowlists. They should then change the setting to **Only this project and any groups and projects in the allowlist**. We encourage the use of available [migration tooling](https://docs.gitlab.com/ci/jobs/ci_job_token/#auto-populate-a-projects-allowlist) to ***automate*** the creation of the allowlist based on the project’s [authentication logs](https://docs.gitlab.com/ci/jobs/ci_job_token/#job-token-authentication-log) prior to GitLab 18.0.   \n* **Self-Managed users** should populate the allowlists before completing the 18.0 upgrade.   \n* **Dedicated users** should work with their GitLab account team to develop the appropriate strategy for their specific instance.\n\n- [Deprecation notice](https://docs.gitlab.com/update/deprecations/#cicd-job-token---authorized-groups-and-projects-allowlist-enforcement)\n- [Documentation](https://docs.gitlab.com/ci/jobs/ci_job_token/#add-a-group-or-project-to-the-job-token-allowlist)\n- [GitLab Detective check available](https://gitlab.com/gitlab-com/support/toolbox/gitlab-detective/-/blob/main/README.md)\n\n**3. Dependency Proxy token scope enforcement**\n\nGitLab.com | Self-Managed | Dedicated\n\nThe Dependency Proxy for containers accepts **`docker login`** and **`docker pull`** requests using **personal, project,** or **group** access tokens without validating their scopes.\n\nIn GitLab 18.0, the Dependency Proxy will require both **`read_registry`** and **`write_registry`** scopes for authentication. 
After this change, authentication attempts using tokens without these scopes will be **rejected**.\n\nBefore upgrading, create new access tokens with the [**required scopes**](https://docs.gitlab.com/ee/user/packages/dependency_proxy/#authenticate-with-the-dependency-proxy-for-container-images), and update your workflow variables and scripts with these new tokens.\n\nYou also have the option to use [**Dependency Token Checker**](https://gitlab.com/gitlab-com/cs-tools/gitlab-cs-tools/dependancy-token-checker/), a community-developed script that allows you to view tokens and rotate them automatically.\n\n- [Deprecation notice](https://docs.gitlab.com/update/deprecations/#dependency-proxy-token-scope-enforcement)\n\n### Medium impact\n\n**1. New data retention limits for vulnerabilities on GitLab.com**\n\nGitLab.com - **Ultimate tier customers only**\n\nStarting in GitLab 18.1 with a phased six-month rollout, we will be introducing a **new data retention limit** for GitLab.com **Ultimate** customers to improve system performance and reliability. The data retention limit affects how long your vulnerability data is stored.\n\nVulnerabilities older than 12 months that have not been updated will be automatically moved to cold storage archives. These archives:\n\n* Remain accessible and downloadable through the GitLab UI  \n* Are retained for 3 years  \n* Are permanently deleted after 3 years \n\n- [Deprecation notice](https://docs.gitlab.com/update/deprecations/#new-data-retention-limits-for-vulnerabilities-on-gitlabcom)\n- [Documentation](https://handbook.gitlab.com/handbook/security/records-retention-deletion/)\n\n**2. Reject container image pull policies not in `allowed_pull_policies`**\n\nGitLab.com | Self-Managed | Dedicated  \n\nAll configured pull policies should be present in the [**allowed_pull_policies configuration**](https://docs.gitlab.com/runner/executors/docker/#allow-docker-pull-policies) specified in the runner's **`config.toml`** file. 
If they are not, the job should fail with an **`incompatible pull policy`** error.\n\nIn the current implementation, when multiple pull policies are defined, jobs pass if at least one pull policy matches those in **`allowed_pull_policies`**, even if other policies are not included.\n\nIn GitLab 18.0, jobs will fail only if none of the pull policies match those in **`allowed_pull_policies`**. However, unlike past behavior, jobs will use only the pull policies listed in **`allowed_pull_policies`**. This distinction can cause jobs that currently pass to fail in GitLab 18.0.\n\n- [Deprecation notice](https://docs.gitlab.com/update/deprecations/#reject-container-image-pull-policies-not-in-allowed_pull_policies)\n- [Documentation](https://docs.gitlab.com/runner/executors/docker/#allow-docker-pull-policies)\n\n**3. PostgreSQL 14 and 15 no longer supported**\n\nSelf-Managed \n\nGitLab follows an [**annual upgrade cadence for PostgreSQL**](https://handbook.gitlab.com/handbook/engineering/infrastructure-platforms/data-access/database-framework/postgresql-upgrade-cadence/).\n\nSupport for PostgreSQL 14 and 15 is scheduled for removal in GitLab 18.0. In GitLab 18.0, PostgreSQL 16 becomes the minimum required version of PostgreSQL.\n\nPostgreSQL 14 and 15 will be supported for the full GitLab 17 release cycle. PostgreSQL 16 will also be supported for instances that want to upgrade prior to GitLab 18.0.\n\nTo prepare for this change on instances that don't use [**PostgreSQL Cluster**](https://docs.gitlab.com/administration/postgresql/replication_and_failover/) (for example, if you are running a single PostgreSQL instance you installed with an Omnibus Linux package), upgrades to GitLab 17.11 will attempt to automatically upgrade PostgreSQL to Version 16. 
If you use [**PostgreSQL Cluster**](https://docs.gitlab.com/administration/postgresql/replication_and_failover/) or [**opt out of this automated upgrade**](https://docs.gitlab.com/omnibus/settings/database/#opt-out-of-automatic-postgresql-upgrades), you must [**manually upgrade to PostgreSQL 16**](https://docs.gitlab.com/omnibus/settings/database/#upgrade-packaged-postgresql-server) to be able to upgrade to GitLab 18.0. Make sure you have sufficient disk space to accommodate the upgrade.\n\n- [Deprecation notice](https://docs.gitlab.com/update/deprecations/#postgresql-14-and-15-no-longer-supported)\n- [Documentation](https://docs.gitlab.com/omnibus/settings/database/#upgrade-packaged-postgresql-server)\n- [Migration guidelines](https://docs.gitlab.com/omnibus/development/managing-postgresql-versions/)\n\n**4. Deprecate the Terraform CI/CD templates**\n\nSelf-Managed\n\nThe Terraform CI/CD templates are deprecated and will be removed in GitLab 18.0. This affects the following templates:\n\n* `Terraform.gitlab-ci.yml`  \n* `Terraform.latest.gitlab-ci.yml`  \n* `Terraform/Base.gitlab-ci.yml`  \n* `Terraform/Base.latest.gitlab-ci.yml`\n\nGitLab won't be able to update the **`terraform`** binary in the job images to any version that is licensed under the BSL.\n\nTo continue using Terraform, clone the templates and [**Terraform image**](https://gitlab.com/gitlab-org/terraform-images), and maintain them as needed. GitLab provides [**detailed instructions**](https://gitlab.com/gitlab-org/terraform-images) for migrating to a custom-built image.\n\n**As an alternative, we recommend using the new OpenTofu CI/CD component on GitLab.com or the new OpenTofu CI/CD template on GitLab Self-Managed.** CI/CD components are not yet available on GitLab Self-Managed, however, [**Issue #415638**](https://gitlab.com/gitlab-org/gitlab/-/issues/415638) proposes adding this feature. 
If CI/CD components become available on GitLab Self-Managed, the OpenTofu CI/CD template will be removed.\n\nRead more about the new [OpenTofu CI/CD component](https://gitlab.com/components/opentofu).\n\n- [Deprecation notice](https://docs.gitlab.com/update/deprecations/#deprecate-terraform-cicd-templates)\n\n**5. Major update of the Prometheus subchart**\n\nSelf-Managed\n\nWith GitLab 18.0 and GitLab chart 9.0, the Prometheus subchart will be updated from 15.3 to 27.3.\n\nAlong with this update, Prometheus 3 will be shipped by default.\n\nManual steps are required to perform the upgrade. If you have Alertmanager, Node Exporter, or Pushgateway enabled, you will also need to update your Helm values.\n\nPlease refer to the [**migration guide**](https://docs.gitlab.com/charts/releases/9_0/#prometheus-upgrade) for more information.\n\n- [Deprecation notice](https://docs.gitlab.com/update/deprecations/#major-update-of-the-prometheus-subchart)\n\n### Low impact\n\n**1. No longer building SUSE Linux Enterprise Server 15 SP2 packages**\n\nSelf-Managed\n\nLong-term service and support (LTSS) for SUSE Linux Enterprise Server (SLES) 15 SP2 ended in December 2024.\n\nTherefore, we will no longer support the SLES SP2 distribution for Linux package installs. You should upgrade to SLES 15 SP6 for continued support.\n\n- [Deprecation notice](https://docs.gitlab.com/update/deprecations/#support-for-suse-linux-enterprise-server-15-sp2)\n\n**2. Remove Gitaly rate limiter**\n\nSelf-Managed\n\nGitaly used to support [**RPC-based rate limiting**](https://gitlab.com/gitlab-org/gitaly/-/blob/4b7ea24f6172a03e7989879200b47b6fd0e2d059/doc/backpressure.md#L55-55). We are deprecating this feature as it does not achieve the desired results. 
Please see the deprecation issue for details.\n\nIf customers have the rate limiter configured (which is being deprecated), no error will be returned and the config will simply be ignored.\n\nCustomers should utilize the [**Concurrency Limiter**](https://docs.gitlab.com/administration/gitaly/concurrency_limiting/) instead.\n\n- [Deprecation notice](https://docs.gitlab.com/update/deprecations/#gitaly-rate-limiting)\n\n**3. Deprecate NGINX controller image 1.3.1 support**\n\nSelf-Managed\n\nWe're upgrading the default NGINX controller image to 1.11.2. This new version requires new RBAC rules and some users set **nginx-ingress.rbac.create: false** to manage their own RBAC rules.\n\nThese users will need to add the RBAC rules before migrating to 1.11.2 or later. We added a fallback mechanism to only deploy 1.3.1 if this Helm value is set as above. We've also added **nginx-ingress.controller.image.disableFallback**, which defaults to false. Users who manage their own RBAC can set this to true to enable their deployments to also use 1.11.2, after ensuring the new RBAC rules are in place.\n\nWe plan to deprecate the 1.3.1 image support as well as the fallback mechanism as part of 17.5, so that we can remove this support completely and use only 1.11.2, which offers numerous security benefits.\n\n[Deprecation notice](https://docs.gitlab.com/update/deprecations/#fallback-support-for-gitlab-nginx-chart-controller-image-v131)\n\n**4. 
Application Security Testing analyzers major version update**\n\nGitLab.com | Self-Managed | Dedicated\n\nThe Application Security Testing stage will be bumping the major versions of its analyzers in tandem with the GitLab 18.0 release.\n\nIf you are not using the default included templates, or have pinned your analyzer versions, you must update your CI/CD job definition to either remove the pinned version or update the latest major version.\n\nUsers of GitLab 17.0-17.11 will continue to experience analyzer updates as normal until the release of GitLab 18.0. After GitLab 18.0, all newly fixed bugs and features will be released only in the new major version of the analyzers.\n\nWe do not backport bugs and features to deprecated versions as per our maintenance policy. As required, security patches will be backported to the latest three minor releases.\n\n- [Deprecation notice](https://docs.gitlab.com/update/deprecations/#application-security-testing-analyzers-major-version-update)\n\n**5. API Discovery will use branch pipelines by default**\n\nGitLab.com | Self-Managed | Dedicated\n\nIn GitLab 18.0, we'll update the default behavior of the CI/CD template for API Discovery (**API-Discovery.gitlab-ci.yml**).\n\nBefore GitLab 18.0, this template configures jobs to run in [**merge request pipelines**](https://docs.gitlab.com/ci/pipelines/merge_request_pipelines/) by default when an MR is open.\n\nStarting in GitLab 18.0, we'll align this template's behavior with the behavior of the [**Stable template editions**](https://docs.gitlab.com/user/application_security/detect/roll_out_security_scanning/#template-editions) for other AST scanners:\n\n* By default, the template will run scan jobs in branch pipelines.  \n* You'll be able to set the CI/CD variable **AST_ENABLE_MR_PIPELINES: true** to use MR pipelines instead when an MR is open. 
The implementation of this new variable is tracked in [**Issue #410880**](https://gitlab.com/gitlab-org/gitlab/-/issues/410880).\n\n- [Deprecation notice](https://docs.gitlab.com/update/deprecations/#api-discovery-will-use-branch-pipelines-by-default)\n\n**6. DAST DAST_DEVTOOLS_API_TIMEOUT will have a lower default value**\n\nGitLab.com | Self-Managed | Dedicated\n\nThe **DAST_DEVTOOLS_API_TIMEOUT** environment variable determines how long a DAST scan waits for a response from the browser. Before GitLab 18.0, the variable has a static value of 45 seconds. After GitLab 18.0, **DAST_DEVTOOLS_API_TIMEOUT** environment variable has a dynamic value, which is calculated based on other timeout configurations.\n\nIn most cases, the 45-second value was higher than the timeout value of many scanner functions. The dynamically calculated value makes the __DAST_DEVTOOLS_API_TIMEOUT__ variable more useful by increasing the number of cases to which it applies.\n\n- [Deprecation notice](https://docs.gitlab.com/update/deprecations/#dast-dast_devtools_api_timeout-will-have-a-lower-default-value) \n\n## Tools and resources to manage your impact\n\nWe’ve developed specific tooling to help our customers understand how these planned changes impact their GitLab instance(s). Once you’ve assessed your impact, we recommend reviewing the mitigation steps provided in the documentation to ensure a smooth transition to GitLab 18.0.\n\n* [Advanced Search Deprecations](https://gitlab.com/gitlab-com/cs-tools/gitlab-cs-tools/deprecation-migration-tools/advanced-search-deprecations): This tool uses GitLab's Advanced Search API to find strings related to deprecations across GitLab groups and projects. It also reports which files should be manually checked. 
*__Note:__ May have some false positives.*  \n* [Dependency Scanning Build Support Detection Helper](https://gitlab.com/security-products/tooling/build-support-detection-helper): This tool identifies projects impacted by three Dependency Scanning deprecations ([1](https://docs.gitlab.com/update/deprecations/#dependency-scanning-for-javascript-vendored-libraries), [2](https://docs.gitlab.com/update/deprecations/#dependency-scanning-upgrades-to-the-gitlab-sbom-vulnerability-scanner), [3](https://docs.gitlab.com/update/deprecations/#resolve-a-vulnerability-for-dependency-scanning-on-yarn-projects); all postponed to 19.0). It uses the API to scan for relevant files and CI job names.\n* [GitLab Detective](https://gitlab.com/gitlab-com/support/toolbox/gitlab-detective/-/blob/main/README.md) (Self-Managed only): This experimental tool automatically checks a GitLab installation for known issues. It completes complex checks by looking at config files or database values. **Note:** Needs to run directly on your GitLab nodes.\n\nWe’ve also launched a series of micro courses (15 minutes or less!) on GitLab University to help you plan and execute mitigation activities for several of these changes. [Start your learning journey here](https://university.gitlab.com/catalog?query=18.0). \n\nIf you have a paid plan and have questions or require assistance with these changes, please [open a support ticket](https://about.gitlab.com/support/portal/) on the GitLab Support Portal. 
\n\nIf you are a [free GitLab.com user](https://about.gitlab.com/support/statement-of-support/#free-users), you can access additional support through community sources, such as [GitLab Documentation](https://docs.gitlab.com/), [GitLab Community Forum](https://forum.gitlab.com/), and [Stack Overflow](https://stackoverflow.com/questions/tagged/gitlab).\n",[717,9],{"slug":762,"featured":6,"template":698},"a-guide-to-the-breaking-changes-in-gitlab-18-0","content:en-us:blog:a-guide-to-the-breaking-changes-in-gitlab-18-0.yml","A Guide To The Breaking Changes In Gitlab 18 0","en-us/blog/a-guide-to-the-breaking-changes-in-gitlab-18-0.yml","en-us/blog/a-guide-to-the-breaking-changes-in-gitlab-18-0",{"_path":768,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":769,"content":775,"config":782,"_id":784,"_type":14,"title":785,"_source":16,"_file":786,"_stem":787,"_extension":19},"/en-us/blog/a-guide-to-the-high-impact-breaking-changes-in-gitlab-17-0",{"title":770,"description":771,"ogTitle":770,"ogDescription":771,"noIndex":6,"ogImage":772,"ogUrl":773,"ogSiteName":685,"ogType":686,"canonicalUrls":773,"schema":774},"A guide to the high-impact breaking changes in GitLab 17.0","Find, assess, and mitigate the impact of deprecations and breaking changes in this year’s major release.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663000/Blog/Hero%20Images/tanukilifecycle.png","https://about.gitlab.com/blog/a-guide-to-the-high-impact-breaking-changes-in-gitlab-17-0","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"A guide to the high-impact breaking changes in GitLab 17.0\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Martin Brümmer\"},{\"@type\":\"Person\",\"name\":\"Sam Wiskow\"}],\n        \"datePublished\": \"2024-04-10\",\n      
}",{"title":770,"description":771,"authors":776,"heroImage":772,"date":777,"body":778,"category":779,"tags":780},[755,757],"2024-04-10","GitLab 17.0 is coming on May 16. This version, a major release, will include many exciting improvements to GitLab, but also removes some deprecated features. Visit the [Deprecations page](https://docs.gitlab.com/ee/update/deprecations?removal_milestone=17.0) to see what is scheduled for removal in 17.0 and keep reading for an overview of the highest impact removals.\n\nAdditionally, this year we are introducing three windows during which we expect breaking changes to be enabled on GitLab.com:\n\n- 2024-04-22 09:00 UTC to 2024-04-24 22:00 UTC\n\n- 2024-04-29 09:00 UTC to 2024-05-01 22:00 UTC\n\n- 2024-05-06 09:00 UTC to 2024-05-08 22:00 UTC\n\n**Note:** Some breaking changes may fall slightly outside of these windows in exceptional circumstances.\n\n**Update:** We have created a [public issue](https://gitlab.com/gitlab-com/Product/-/issues/13310) with more details about which changes should land in which windows.\n\n## High-impact breaking changes in GitLab 17.0\n\nWe have identified the following high-impact removals in 17.0. We define “high impact” as potentially disrupting critical workflows, such as continuous integration (CI), continuous deployment (CD), compliance, or the availability of the instance. That’s why we suggest you should prioritize these breaking changes first when preparing for the major release. While you can find detailed information on each breaking change in the linked documentation, we’ve provided some notes about the affected features and potential impact in this overview.\n\n### Self-managed deployment\n- [Postgres 13 deprecated](https://docs.gitlab.com/ee/update/deprecations.html?removal_milestone=17.0#postgresql-13-deprecated)\n    - Impacts all self-managed customers. 
Failing to upgrade to Postgres 14 will break the deployment.\n    - Postgres 14 is already supported starting from GitLab 16.2.0.\n- [omniauth-azure-oauth2 gem is deprecated](https://docs.gitlab.com/ee/update/deprecations.html?removal_milestone=17.0#omniauth-azure-oauth2-gem-is-deprecated)\n    - Impacts self-managed customers who use the `omniauth-azure-oauth2` provider for authentication.\n    - Without migration to `omniauth_openid_connect`, users will no longer be able to sign in using the Azure login button.\n- [Min concurrency and max concurrency in Sidekiq options](https://docs.gitlab.com/ee/update/deprecations.html?removal_milestone=17.0#min-concurrency-and-max-concurrency-in-sidekiq-options)\n    - Impacts GitLab deployments that have `sidekiq['min_concurrency']` and `sidekiq['max_concurrency']` configured in their `gitlab.rb`.\n    - Failure to migrate will break the deployment.\n\n###  CI\n- [Registration tokens and server-side runner arguments in POST /api/v4/runners endpoint](https://docs.gitlab.com/ee/update/deprecations.html#registration-tokens-and-server-side-runner-arguments-in-post-apiv4runners-endpoint)\n    - Impacts custom automations that provision runners.\n    - Potentially breaks CI pipelines by disabling runner provisioning.\n- [File type variable expansion fixed in downstream pipelines](https://docs.gitlab.com/ee/update/deprecations.html#file-type-variable-expansion-fixed-in-downstream-pipelines)\n    - Impacts pipelines using [downstream pipelines](https://docs.gitlab.com/ee/ci/pipelines/downstream_pipelines.html) passing [File-type variables](https://docs.gitlab.com/ee/ci/pipelines/downstream_pipelines.html#pass-cicd-variables-to-a-downstream-pipeline) to the downstream pipeline.\n    - Changed behavior may break the downstream pipeline due to a change in variable content.\n\n- [after_script keyword will run for canceled jobs](https://docs.gitlab.com/ee/update/deprecations.html#after_script-keyword-will-run-for-cancelled-jobs)\n    - 
Impacts pipelines using the [after_script](https://docs.gitlab.com/ee/ci/yaml/#after_script) keyword.\n    - Changed behavior may break pipelines or cause unexpected pipeline results.\n- [Old versions of JSON web tokens are deprecated](https://docs.gitlab.com/ee/update/deprecations.html#old-versions-of-json-web-tokens-are-deprecated), [HashiCorp Vault integration will no longer use CI_JOB_JWT by default](https://docs.gitlab.com/ee/update/deprecations.html#hashicorp-vault-integration-will-no-longer-use-ci_job_jwt-by-default), and [JWT /-/jwks instance endpoint is deprecated](https://docs.gitlab.com/ee/update/deprecations.html#jwt--jwks-instance-endpoint-is-deprecated)\n    - Impacts pipelines relying on the `CI_JOB_JWT or CI_JOB_JWT_V2` CI variables.\n    - The removal of the variable may break Vault integrations or otherwise cause pipelines to fail.\n\n### CD\n- [The pull-based deployment features of the GitLab agent for Kubernetes is deprecated](https://docs.gitlab.com/ee/update/deprecations.html#the-pull-based-deployment-features-of-the-gitlab-agent-for-kubernetes-is-deprecated)\n    - Impacts projects using the GitLab agent for Kubernetes for deployments.\n    - The change may break CD workflows relying on the GitLab agent for Kubernetes.\n    - The agent itself is not deprecated and still used for a number of features, like communicating with the cluster, its API endpoints and pushing information about events in the cluster to GitLab.\n\n- [Agent for Kubernetes option ca-cert-file renamed](https://docs.gitlab.com/ee/update/deprecations.html?removal_milestone=17.0#agent-for-kubernetes-option-ca-cert-file-renamed)\n    - Impacts customers installing Kubernetes agents behind a self-signed certificate.\n    - The change may impact CD workflows relying on connecting Kubernetes clusters to GitLab via the agent.\n\n### Package\n- [npm package uploads now occur 
asynchronously](https://docs.gitlab.com/ee/update/deprecations.html?removal_milestone=17.0#npm-package-uploads-now-occur-asynchronously)\n    - Impacts projects [publishing npm or Yarn packages to the GitLab registry](https://docs.gitlab.com/ee/user/packages/npm_registry/#publish-to-gitlab-package-registry).\n    - Due to the asynchronous upload, pipelines may break that expect packages to be available as soon as they are published.\n\n- [Dependency Proxy: Access tokens to have additional scope checks](https://docs.gitlab.com/ee/update/deprecations.html?removal_milestone=17.0#dependency-proxy-access-tokens-to-have-additional-scope-checks)\n    - Impacts projects using the Dependency Proxy with a group access token or personal access token that have insufficient [scopes](https://docs.gitlab.com/ee/user/packages/dependency_proxy/#authenticate-with-the-dependency-proxy).\n    - Because tokens without the required scopes will fail, this may break pipelines by rejecting docker login and docker pull requests.\n\n- [Maven repository group permissions](https://docs.gitlab.com/ee/update/deprecations.html?removal_milestone=17.0#breaking-change-to-the-maven-repository-group-permissions)\n    - Impacts projects using the Maven repository at the group level where user permissions are not set up correctly.\n    - Because users without correct permissions will fail to access the requested packages, this change may break pipelines for those users.\n\n### GitLab.com\n- [Upgrading the operating system version of GitLab SaaS runners on Linux](https://docs.gitlab.com/ee/update/deprecations.html?removal_milestone=17.0#upgrading-the-operating-system-version-of-gitlab-saas-runners-on-linux)\n    - Impacts pipelines using `saas-linux-*-amd64` tagged shared runners on GitLab.com that use outdated Docker-in-Docker or Kaniko versions.\n    - The outdated versions will be unable to detect the container runtime and fail, breaking the pipeline.\n\n- [Deprecating Windows Server 2019 in favor of 
2022](https://docs.gitlab.com/ee/update/deprecations.html?removal_milestone=17.0#deprecating-windows-server-2019-in-favor-of-2022)\n    - Impacts pipelines using `shared-windows` and `windows-1809` tagged shared runners on GitLab.com.\n    - Affected jobs will not be picked up by runners, thus blocking the pipeline.\n    - You can identify affected jobs by [searching](https://docs.gitlab.com/ee/user/search/exact_code_search.html) for the deprecated tags in your .yml files.\n\n- [Removal of tags from small SaaS runners on Linux](https://docs.gitlab.com/ee/update/deprecations.html?removal_milestone=17.0#removal-of-tags-from-small-saas-runners-on-linux)\n    - Impacts pipelines using shared runners tagged `docker`, `east-c`, `gce`, `git-annex`, `linux`, `mongo`, `mysql`, `ruby`, or  `shared` on GitLab.com.\n    - Affected jobs will not be picked up by runners, thus blocking the pipeline.\n    - You can identify affected jobs by [searching](https://docs.gitlab.com/ee/user/search/exact_code_search.html) for the deprecated tags in your .yml files.\n\n### Ultimate only\n- [Security policy fields newly_detected and match_on_inclusion are deprecated](https://docs.gitlab.com/ee/update/deprecations.html?removal_milestone=17.0#security-policy-field-newly_detected-is-deprecated)\n    - Impacts groups and projects that have merge request approval policies (previously: scan result policies) enabled and use the deprecated keywords.\n    - Without migration, the rules enforced by the policies will stop working, causing potential compliance violations.\n\n- [Required Pipeline Configuration is deprecated](https://docs.gitlab.com/ee/update/deprecations.html?removal_milestone=17.0#required-pipeline-configuration-is-deprecated)\n    - Impacts Ultimate self-managed customers using required pipeline configuration.\n    - Without migration, the required configuration will no longer be used by projects, impacting all pipelines that are run on the instance.\n\n- [Proxy-based DAST is 
deprecated](https://docs.gitlab.com/ee/update/deprecations.html#proxy-based-dast-deprecated)\n    - Impacts projects that are using DAST with the variable `DAST_BROWSER_SCAN` set to false.\n    - Without migration, DAST scans in existing pipelines will fail.\n    - Follow the recommended changes outlined in the [DAST migration guide](https://docs.gitlab.com/ee/user/application_security/dast/proxy_based_to_browser_based_migration_guide.html) to ensure DAST can continue scanning your applications.\n\n## See all removals in GitLab 17.0\n\nFor more detailed information and to see all the removals coming up in this year's major release, please visit the [Deprecations page](https://docs.gitlab.com/ee/update/deprecations?removal_milestone=17.0).\n\n> Live demo! Discover the future of AI-driven software development with our GitLab 17 virtual launch event. [Register today!](https://about.gitlab.com/seventeen/)\n","bulletin-board",[9,717,781],"releases",{"slug":783,"featured":91,"template":698},"a-guide-to-the-high-impact-breaking-changes-in-gitlab-17-0","content:en-us:blog:a-guide-to-the-high-impact-breaking-changes-in-gitlab-17-0.yml","A Guide To The High Impact Breaking Changes In Gitlab 17 0","en-us/blog/a-guide-to-the-high-impact-breaking-changes-in-gitlab-17-0.yml","en-us/blog/a-guide-to-the-high-impact-breaking-changes-in-gitlab-17-0",{"_path":789,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":790,"content":796,"config":806,"_id":808,"_type":14,"title":809,"_source":16,"_file":810,"_stem":811,"_extension":19},"/en-us/blog/accelerate-code-reviews-with-gitlab-duo-and-amazon-q",{"title":791,"description":792,"ogTitle":791,"ogDescription":792,"noIndex":6,"ogImage":793,"ogUrl":794,"ogSiteName":685,"ogType":686,"canonicalUrls":794,"schema":795},"Accelerate code reviews with GitLab Duo and Amazon Q","Use AI-powered agents to optimize code reviews by automatically analyzing merge requests and providing comprehensive feedback on bugs, readability, and coding 
standards.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750096976/Blog/Hero%20Images/Blog/Hero%20Images/Screenshot%202024-11-27%20at%204.55.28%E2%80%AFPM_4VVz6DgGBOvbGY8BUmd068_1750096975734.png","https://about.gitlab.com/blog/accelerate-code-reviews-with-gitlab-duo-and-amazon-q","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Accelerate code reviews with GitLab Duo and Amazon Q\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Cesar Saavedra\"}],\n        \"datePublished\": \"2025-06-02\",\n      }",{"title":791,"description":792,"authors":797,"heroImage":793,"date":799,"body":800,"category":801,"tags":802},[798],"Cesar Saavedra","2025-06-02","Code reviews are critical for catching bugs, improving code readability, and maintaining coding standards, but they can also be a major bottleneck in your workflow. When you're trying to ship features quickly, waiting for multiple team members to review your code can be frustrating. The back-and-forth discussions, the scheduling conflicts, and the time it takes to get everyone aligned can stretch what should be a simple review into days or even weeks.\n\nHere's where [GitLab Duo with Amazon Q](https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws/), our new offering that delivers agentic AI throughout the software development lifecycle for AWS customers, comes in to transform your review process. This intelligent, AI-powered solution can perform comprehensive code reviews for you in a fraction of the time it would take your human colleagues. By leveraging advanced agentic AI capabilities, GitLab Duo with Amazon Q streamlines your entire review workflow without sacrificing the quality and thoroughness you need. 
Think of it as having an always-available, highly skilled reviewer who can instantly analyze your code and provide actionable feedback.\n\n## How it works: Launching a code review\n\nSo how does GitLab Duo with Amazon Q actually work? Let's say you've just finished working on a feature and created a merge request with multiple code updates. Instead of pinging your teammates and waiting for their availability, you simply enter a quick command in the comment section: \"/q review\". That's it – just those two words trigger the AI to spring into action.\n\n![Triggering a code review using GitLab Duo with Amazon Q](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097002/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097002096.png)\n\nOnce you've entered the command, Amazon Q Service immediately begins analyzing your code changes. You'll see a confirmation that the review is underway, and within moments, the AI is examining every line of your updates, checking for potential issues across multiple dimensions.\nWhen the review completes, you receive comprehensive feedback that covers all the bases: bug detection, readability improvements, syntax errors, and adherence to your team's coding standards. The AI doesn't just point out problems, it provides context and suggestions for fixing them, making it easy for you to understand what needs attention and why.\n\nThe beauty of this agentic AI approach is that it handles the heavy lifting of code review while you focus on what matters most: building great software. You get the benefits of thorough code reviews — better bug detection, consistent coding standards, and improved code quality — without the time sink. 
Your deployment times shrink dramatically because you're no longer waiting in review queues, and your entire team becomes more productive.\n\n## Why use GitLab Duo with Amazon Q?\n\nGitLab Duo with Amazon Q transforms your development workflow in the following ways:\n- Lightning-fast code reviews that don't compromise on quality\n- Consistent application of coding standards across your entire codebase\n- Immediate feedback that helps you fix issues before they reach production\n- Reduced deployment times that let you ship features faster\n- More time for your team to focus on creative problem-solving instead of repetitive reviews\n\nReady to see this game-changing feature in action? Watch how GitLab Duo with Amazon Q can revolutionize your code review process:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/4gFIgyFc02Q?si=GXVz--AIrWiwzf-I\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n> To learn more about GitLab Duo with Amazon Q visit us at an upcoming [AWS Summit in a city near you](https://about.gitlab.com/events/aws-summits/) or [reach out to your GitLab representative](https://about.gitlab.com/partners/technology-partners/aws/#form).\n> \n> And make sure to join the GitLab 18 virtual launch event to learn about our agentic AI plans and more. 
[Register today!](https://about.gitlab.com/eighteen/)","ai-ml",[803,9,804,717,695,284,805,719],"AI/ML","code review","AWS",{"slug":807,"featured":91,"template":698},"accelerate-code-reviews-with-gitlab-duo-and-amazon-q","content:en-us:blog:accelerate-code-reviews-with-gitlab-duo-and-amazon-q.yml","Accelerate Code Reviews With Gitlab Duo And Amazon Q","en-us/blog/accelerate-code-reviews-with-gitlab-duo-and-amazon-q.yml","en-us/blog/accelerate-code-reviews-with-gitlab-duo-and-amazon-q",{"_path":813,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":814,"content":820,"config":827,"_id":829,"_type":14,"title":830,"_source":16,"_file":831,"_stem":832,"_extension":19},"/en-us/blog/agentic-ai-guides-and-resources",{"title":815,"description":816,"ogTitle":815,"ogDescription":816,"noIndex":6,"ogImage":817,"ogUrl":818,"ogSiteName":685,"ogType":686,"canonicalUrls":818,"schema":819},"Agentic AI guides and resources","Learn everything you need to know about agentic AI, including what it is, how it works, why it levels up your DevSecOps environment, and best practices for implementation.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749658912/Blog/Hero%20Images/blog-image-template-1800x945__20_.png","https://about.gitlab.com/blog/agentic-ai-guides-and-resources","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Agentic AI guides and resources\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"GitLab\"}],\n        \"datePublished\": \"2025-05-07\",\n      }",{"title":815,"description":816,"authors":821,"heroImage":817,"date":823,"body":824,"category":801,"tags":825,"updatedDate":826},[822],"GitLab","2025-09-26","## Defining agentic AI\n\nAgentic AI is a type of artificial intelligence that leverages advanced language models and natural language processing to take independent action. 
Unlike traditional generative AI tools that require constant human direction, these systems can understand requests, make decisions, and execute multi-step plans to achieve goals. They tackle complex tasks by breaking them into manageable steps and employ adaptive learning to modify their approach when facing challenges.\n\n[Learn more about agentic AI](https://about.gitlab.com/topics/agentic-ai/)\n\n## Agentic AI insights\n- [Transform development with agentic AI: The enterprise guide](https://about.gitlab.com/the-source/ai/transform-development-with-agentic-ai-the-enterprise-guide/)\n- [GitLab 18.4: AI-native development with automation and insight](https://about.gitlab.com/blog/gitlab-18-4-ai-native-development-with-automation-and-insight/) With GitLab 18.4, teams create custom agents, unlock Knowledge Graph context, and auto-fix pipelines so developers stay focused and in flow.\n- [GitLab 18.3: Expanding AI orchestration in software engineering](https://about.gitlab.com/blog/gitlab-18-3-expanding-ai-orchestration-in-software-engineering/) Learn how we're advancing human-AI collaboration with enhanced Flows, enterprise governance, and seamless tool integration.\n- [GitLab Duo Agent Platform Public Beta: Next-gen AI orchestration and more](https://about.gitlab.com/blog/gitlab-duo-agent-platform-public-beta/) — Introducing the DevSecOps orchestration platform designed to unlock asynchronous collaboration between developers and AI agents.\n- [GitLab Duo Agent Platform: What's next for intelligent DevSecOps](https://about.gitlab.com/blog/gitlab-duo-agent-platform-what-is-next-for-intelligent-devsecops/) — GitLab Duo Agent Platform, a DevSecOps orchestration platform for humans and AI agents, leverages agentic AI for collaboration across the software development lifecycle.\n- [From vibe coding to agentic AI: A roadmap for technical leaders](https://about.gitlab.com/the-source/ai/from-vibe-coding-to-agentic-ai-a-roadmap-for-technical-leaders/) — Discover how to 
implement vibe coding and agentic AI in your development process to increase productivity while maintaining code quality and security.\n- [Emerging agentic AI trends reshaping software development](https://about.gitlab.com/the-source/ai/emerging-agentic-ai-trends-reshaping-software-development/) — Discover how agentic AI transforms development from isolated coding to intelligent workflows that enhance productivity while maintaining security.\n- [Agentic AI: Unlocking developer potential at scale](https://about.gitlab.com/the-source/ai/agentic-ai-unlocking-developer-potential-at-scale/) — Explore how agentic AI is transforming software development, moving beyond code completion to create AI partners that proactively tackle complex tasks.\n- [Agentic AI, self-hosted models, and more: AI trends for 2025](https://about.gitlab.com/the-source/ai/ai-trends-for-2025-agentic-ai-self-hosted-models-and-more/) — Discover key trends in AI for software development, from on-premises model deployments to intelligent, adaptive AI agents.\n- [How agentic AI unlocks platform engineering potential](https://about.gitlab.com/the-source/ai/how-agentic-ai-unlocks-platform-engineering-potential/) — Explore how agentic AI elevates platform engineering by automating complex workflows and scaling standardization.\n\n## The agentic AI ecosystem\n- [AI-driven code analysis: The new frontier in code security](https://about.gitlab.com/topics/agentic-ai/ai-code-analysis/) \n- [DevOps automation & AI agents](https://about.gitlab.com/topics/agentic-ai/devops-automation-ai-agents/)\n- [AI-augmented software development: Agentic AI for DevOps](https://about.gitlab.com/topics/agentic-ai/ai-augmented-software-development/)\n\n## Best practices for implementing agentic AI\n\n- [Implementing effective guardrails for AI agents](https://about.gitlab.com/the-source/ai/implementing-effective-guardrails-for-ai-agents/) — Discover essential security guardrails for AI agents in DevSecOps, from compliance 
controls and infrastructure protection to user access management.\n\n## GitLab's agentic AI offerings\n\n### GitLab Duo with Amazon Q\n\n- [GitLab Duo with Amazon Q: Agentic AI optimized for AWS generally available](https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws/) — The comprehensive AI-powered DevSecOps platform combined with the deepest set of cloud computing capabilities speeds dev cycles, increases automation, and improves code quality.\n- [DevSecOps + Agentic AI: Now on GitLab Self-Managed Ultimate on AWS](https://about.gitlab.com/blog/devsecops-agentic-ai-now-on-gitlab-self-managed-ultimate-on-aws/) — Start using AI-powered, DevSecOps-enhanced agents in your AWS GitLab Self-Managed Ultimate instance. Enjoy the benefits of GitLab Duo and Amazon Q in your organization.\n- [GitLab Duo with Amazon Q partner page](https://about.gitlab.com/partners/technology-partners/aws/)\n\nWatch GitLab Duo with Amazon Q in action:\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1075753390?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Technical Demo: GitLab Duo with Amazon Q\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n#### Guided tour\nClick on the image to start a tour of GitLab Duo with Amazon Q:\n\n[![GitLab Duo with Amazon Q interactive tour](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749673568/Blog/Content%20Images/Screenshot_2025-05-07_at_7.24.45_AM.png)](https://gitlab.navattic.com/duo-with-q)\n\n#### GitLab Duo with Amazon Q tutorials\n- [Enhance application quality with AI-powered test generation](https://about.gitlab.com/blog/enhance-application-quality-with-ai-powered-test-generation/) — Learn how GitLab 
Duo with Amazon Q improves the QA process by automatically generating comprehensive unit tests.\n- [GitLab Duo + Amazon Q: Transform ideas into code in minutes](https://about.gitlab.com/blog/gitlab-duo-amazon-q-transform-ideas-into-code-in-minutes/) — The new GitLab Duo with Amazon Q integration analyzes your issue descriptions and automatically generates complete working code solutions, accelerating development workflows.\n- [Accelerate code reviews with GitLab Duo and Amazon Q](https://about.gitlab.com/blog/accelerate-code-reviews-with-gitlab-duo-and-amazon-q/) — Use AI-powered agents to optimize code reviews by automatically analyzing merge requests and providing comprehensive feedback on bugs, readability, and coding standards.\n- [Speed up code reviews: Let AI handle the feedback implementation](https://about.gitlab.com/blog/speed-up-code-reviews-let-ai-handle-the-feedback-implementation/) — Discover how GitLab Duo with Amazon Q automates the implementation of code review feedback through AI, transforming a time-consuming manual process into a streamlined workflow.\n\n### GitLab Duo Agent Platform\n- [GitLab Duo Chat gets agentic AI makeover](https://about.gitlab.com/blog/gitlab-duo-chat-gets-agentic-ai-makeover/) — Our new Duo Chat experience, currently an experimental release, helps developers onboard to projects, understand assignments, implement changes, and more.\nWatch GitLab Duo Agent Platform in action:\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1095679084?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media; web-share\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Agent Platform Demo Clip\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n#### GitLab Agent Platform tutorials and use cases\n- [Vibe 
coding with GitLab Duo Agent Platform: Issue to MR Flow](https://about.gitlab.com/blog/vibe-coding-with-gitlab-duo-agent-platform-issue-to-mr-flow/) - Learn how to update your application in minutes with our newest agent Flow that takes developers from idea to code.\n- [Get started with GitLab Duo Agentic Chat in the web UI](https://about.gitlab.com/blog/get-started-with-gitlab-duo-agentic-chat-in-the-web-ui/) - Learn about our new GitLab Duo AI feature that automates tasks by breaking down complex problems and executing operations across multiple sources.\n- [Custom rules in GitLab Duo Agentic Chat for greater developer efficiency](https://about.gitlab.com/blog/custom-rules-duo-agentic-chat-deep-dive/) — Discover how AI can understand your codebase, follow your conventions, and generate production-ready code with minimal review cycles.\n- [Accelerate learning with GitLab Duo Agent Platform](https://about.gitlab.com/blog/accelerate-learning-with-gitlab-duo-agent-platform/) — Learn how agentic AI helped generate comprehensive gRPC documentation in minutes, not hours.\n- [Fast and secure AI agent deployment to Google Cloud with GitLab](https://about.gitlab.com/blog/fast-and-secure-ai-agent-deployment-to-google-cloud-with-gitlab/)\n## Learn more with GitLab University\n- [Get Started with GitLab Duo coursework](https://university.gitlab.com/pages/ai)\n- [GitLab Duo Enterprise Learning Path](https://university.gitlab.com/learning-paths/gitlab-duo-enterprise-learning-path)\n\n## More AI resources\n- [2024 Global DevSecOps Survey: Navigating AI maturity in DevSecOps](https://about.gitlab.com/developer-survey/2024/ai/)\n- [The Role of AI in DevOps](https://about.gitlab.com/topics/devops/the-role-of-ai-in-devops/)\n- [The latest AI/ML articles from GitLab](https://about.gitlab.com/blog/categories/ai-ml/)\n- [GitLab Duo](https://about.gitlab.com/gitlab-duo/)\n- [GitLab Duo Agent 
Platform](https://about.gitlab.com/gitlab-duo/agent-platform/)",[803,9,719],"2025-09-29",{"slug":828,"featured":91,"template":698},"agentic-ai-guides-and-resources","content:en-us:blog:agentic-ai-guides-and-resources.yml","Agentic Ai Guides And Resources","en-us/blog/agentic-ai-guides-and-resources.yml","en-us/blog/agentic-ai-guides-and-resources",{"_path":834,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":835,"content":841,"config":848,"_id":850,"_type":14,"title":851,"_source":16,"_file":852,"_stem":853,"_extension":19},"/en-us/blog/ai-native-gitlab-premium-transform-higher-education-software-development",{"title":836,"description":837,"ogTitle":836,"ogDescription":837,"noIndex":6,"ogImage":838,"ogUrl":839,"ogSiteName":685,"ogType":686,"canonicalUrls":839,"schema":840},"AI-native GitLab Premium: Transform higher education software development","The DevSecOps platform's enterprise-grade features for academic workflows, data protection, and support ensure better collaboration, security, and efficiency.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659537/Blog/Hero%20Images/display-article-image-0679-1800x945-fy26.png","https://about.gitlab.com/blog/ai-native-gitlab-premium-transform-higher-education-software-development","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"AI-native GitLab Premium: Transform higher education software development\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Jessica Hurwitz\"},{\"@type\":\"Person\",\"name\":\"Elisabeth Burrows\"}],\n        \"datePublished\": \"2025-06-10\",\n      }",{"title":836,"description":837,"authors":842,"heroImage":838,"date":845,"body":846,"category":717,"tags":847},[843,844],"Jessica Hurwitz","Elisabeth Burrows","2025-06-10","Educational institutions increasingly rely on modern software development practices to support teaching, research, and administrative functions. 
As development needs grow more complex in university and college environments, GitLab Premium with Duo provides essential capabilities that address the unique challenges faced by higher education – particularly around open source development, remote collaboration, and enterprise-grade security.\n\nGitLab's comprehensive, intelligent DevSecOps platform delivers value that extends far beyond fundamental version control. Built on an open source foundation with enterprise-grade features, GitLab Premium helps prevent costly security incidents involving student data, provides cloud-based development environments for distributed teams, and offers the professional support that educational institutions need for mission-critical systems. And now [Premium includes GitLab Duo AI essentials](https://about.gitlab.com/blog/gitlab-premium-with-duo/) Code Suggestions and Chat at no additional cost.\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1083723619?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"GitLab Premium with Duo Core\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n## The unique development environment in higher education\n\nUniversities and colleges operate in a distinctly challenging technical environment. Development teams must support multidisciplinary collaboration across technical and non-technical departments while managing vast amounts of sensitive data – from student records and financial information to research findings and faculty evaluations.\n\nMost institutions face these challenges with limited IT resources, yet must support thousands of concurrent users across numerous projects and research initiatives. 
Research integrity requirements add another layer of complexity, as development work often needs to maintain traceability and reproducibility standards.\n\n## Premium solutions for educational institutions\n\nGitLab Premium with Duo has the functionality that higher education needs.\n\n### Enhanced collaboration and workflow capabilities\n\nCross-departmental projects are common in educational settings – from multi-department research initiatives to custom module development for systems like Ellucian Banner, an enterprise resource planning application used by higher education. These complex projects require sophisticated workflow management that goes beyond basic version control. \n\nGitLab Premium addresses these challenges with powerful collaboration and project visualization features, including epics, roadmaps, and advanced Kanban boards for Agile development workflows. When you assign multiple approvers to certain merge requests and protected branches, you ensure higher code quality and accountability across teams. These tools allow institutions to coordinate work across departments while aligning with institution-wide objectives – essential for managing multiphase campus technology initiatives.\n\nIn Australia, [Deakin University’s](https://about.gitlab.com/customers/deakin-university/) enablement team uses GitLab to build standardized processes and reusable templates — such as custom merge request templates, templated build pipelines, and a security and compliance framework — that can be shared with the broader university community and citizen developers, driving innovation and collaboration both inside the university and with key partners. 
“We were trying to bring in a community of practice and help it thrive for quite some time, but we were never successful until we had this tool,” said Aaron Whitehand, director of Digital Enablement at Deakin University.\n\n> #### Read more about [how Deakin University uses GitLab to drive improvements](https://about.gitlab.com/customers/deakin-university/) in collaboration and productivity, including a 60% reduction in manual tasks.\n\n### Advanced data protection and governance\n\nEducational institutions generate and manage vast amounts of data, ranging from student records and financial information to research findings and faculty evaluations. The security stakes are particularly high. The [2023 MOVEit breach](https://universitybusiness.com/in-just-3-months-this-data-breach-has-compromised-nearly-900-institutions/), which spanned three months and compromised approximately 900 educational institutions, exposed the sensitive information of more than 62 million people. This demonstrates the critical need for proactive security measures integrated directly into higher education development workflows. \n\nVulnerability scanning stops code releases that contain security risks, enabling institutions to establish and enforce governance protocols that protect sensitive information. These capabilities help universities implement proper access controls and permission structures for research databases, creating a secure framework where authorized researchers maintain appropriate access – effectively balancing robust protection with necessary collaboration.\n\nGitLab is built from the ground up to secure your source code. Scalable Git-based repositories, granular access controls, and built-in compliance features eliminate bottlenecks in your workflow while meeting security requirements. GitLab Premium provides audit tracking and compliance capabilities essential for educational environments. 
Complete audit trails capture detailed logs of all code changes, access attempts, and system modifications with timestamps and user attribution. Full change management documentation ensures traceability of who made what changes, when, and why – critical for research integrity – while access control auditing monitors repository access and permissions changes. \n\n### Cloud-based development environments and remote collaboration\n\nModern educational institutions require flexible development environments that support distributed teams, remote learning scenarios, and diverse technical requirements. GitLab Premium provides:\n\n* **[GitLab Workspaces](https://docs.gitlab.com/user/workspace/):** Cloud-based development environments accessible from any device  \n* **[Web IDE integration](https://docs.gitlab.com/user/project/web_ide/):** Browser-based coding with full GitLab feature integration  \n* **[Container-based development](https://about.gitlab.com/blog/build-and-run-containers-in-remote-development-workspaces/):** Consistent, reproducible development environments across different projects and user groups\n\nThese capabilities are particularly valuable for supporting remote and hybrid learning models, enabling students and researchers to access standardized development environments regardless of their physical location or local hardware constraints.\n\n### Professional support for critical systems\n\nSmall IT teams in educational settings often support large, complex infrastructure with minimal resources. Reaching out to user forums for answers doesn't always mean you'll get an accurate reply and isn't efficient for large teams. 
GitLab Premium includes dedicated professional support, providing faster issue resolution and upgrade assistance during critical periods like class enrollment or research deadlines.\n\nThis minimizes downtime for critical services and ensures continuity of operations during peak usage periods, giving stretched IT departments the enterprise-grade reliability they need for essential academic systems.\n\n### Built on open source with enterprise capabilities\n\nOpen source software is developed collaboratively in a public manner, with source code freely available for anyone to view, modify, and distribute. This development model fosters innovation through community contributions and ensures transparency in how software functions. GitLab's open source foundation resonates strongly with educational institutions' values around collaboration, transparency, and community contribution. GitLab Premium features extend this foundation with enterprise-grade capabilities while maintaining the ability to contribute back to the open source ecosystem.\n\nKey open source advantages include:\n\n* **Transparency:** Complete visibility into platform capabilities and security measures – you can examine exactly how the software works  \n* **Community contribution:** Ability to contribute improvements back to the broader community and benefit from global developer expertise  \n* **Vendor independence:** Reduced lock-in risk with open source alternatives and the freedom to modify code as needed  \n* **Co-creation opportunities:** Collaborative development with the broader community, including other educational institutions, to build shared solutions\n\n### AI assistant for software development tasks\n\nGitLab Premium with [Duo](https://about.gitlab.com/gitlab-duo/) brings powerful AI-native capabilities directly into the development workflow, including:  \n* [**Code Suggestions**](https://docs.gitlab.com/user/project/repository/code_suggestions/), which provides real-time code completion 
and suggestions, helping developers write code faster and more efficiently  \n* [**Chat**](https://docs.gitlab.com/user/gitlab_duo_chat/), which allows team members to get instant answers to questions, troubleshoot issues, and access documentation directly within the GitLab environment\n\nThese AI tools significantly enhance productivity, reduce errors, and streamline collaboration, making GitLab Premium an even more valuable asset for software development teams in higher education.\n\n### Transparency at the core\n\nHigher education institutions handle incredibly sensitive data — from student records and research findings to proprietary academic work and federal grant information. \n\nThe [GitLab AI Transparency Center ](https://about.gitlab.com/ai-transparency-center/)demonstrates our commitment to transparency, accountability, and protection of customer data and intellectual property, providing the privacy guarantees that educational institutions require.\n\nGitLab launched the AI Transparency Center to help customers, community, and team members better understand how GitLab upholds ethics and transparency in our AI-powered features. \n\nOur publicly available documentation highlights the comprehensive measures we take to protect your institution's data and intellectual property. [GitLab's AI Ethics Principles for Product Development](https://handbook.gitlab.com/handbook/legal/ethics-compliance-program/ai-ethics-principles/) guide us as we continue to build and evolve our AI functionality, helping higher education organizations harness the promise of AI while maintaining complete control and oversight of their most valuable information assets.\n\n## Get started with GitLab Premium today\n\nFor educational institutions, GitLab Premium with Duo represents a strategic technical investment that combines the benefits of open source development with enterprise-grade, AI-native capabilities. 
By providing professional-grade tools ready for the challenges familiar to the complex technical environment of higher education, GitLab Premium with Duo helps institutions address security vulnerabilities, streamline development workflows, and maintain the reliable infrastructure that academic and research operations depend on.\n\n> [Learn more about GitLab for Public Sector](https://about.gitlab.com/solutions/public-sector/) or  [speak to our sales team today](https://about.gitlab.com/sales/).\n\n## Read more\n\n- [Unlocking AI for every GitLab Premium and Ultimate customer](https://about.gitlab.com/blog/gitlab-premium-with-duo/)\n- [GitLab Duo Code Suggestions](https://docs.gitlab.com/user/project/repository/code_suggestions/)\n- [GitLab Duo Chat](https://docs.gitlab.com/user/gitlab_duo_chat/)",[565,9,717,695,186],{"slug":849,"featured":91,"template":698},"ai-native-gitlab-premium-transform-higher-education-software-development","content:en-us:blog:ai-native-gitlab-premium-transform-higher-education-software-development.yml","Ai Native Gitlab Premium Transform Higher Education Software Development","en-us/blog/ai-native-gitlab-premium-transform-higher-education-software-development.yml","en-us/blog/ai-native-gitlab-premium-transform-higher-education-software-development",{"_path":855,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":856,"content":862,"config":868,"_id":870,"_type":14,"title":871,"_source":16,"_file":872,"_stem":873,"_extension":19},"/en-us/blog/ai-powered-growth-transform-every-stage-of-software-delivery",{"title":857,"description":858,"ogTitle":857,"ogDescription":858,"noIndex":6,"ogImage":859,"ogUrl":860,"ogSiteName":685,"ogType":686,"canonicalUrls":860,"schema":861},"AI-powered growth: Transform every stage of software delivery","Find out how the latest developments in the GitLab AI-powered DevSecOps Platform boost efficiency throughout the software development 
lifecycle.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749669095/Blog/Hero%20Images/gitlabduo.png","https://about.gitlab.com/blog/ai-powered-growth-transform-every-stage-of-software-delivery","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"AI-powered growth: Transform every stage of software delivery\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Laurena Alves\"}],\n        \"datePublished\": \"2024-01-29\",\n      }",{"title":857,"description":858,"authors":863,"heroImage":859,"date":865,"body":866,"category":801,"tags":867},[864],"Laurena Alves","2024-01-29","It’s clear artificial intelligence (AI) is top of mind for organizations and their software development teams. With developers and security teams forced to focus so much of their time on repetitive, mundane tasks, of course they’re looking for ways to ease that work and carve out more time in their schedules.\n\nAccording to the [GitLab 2023 State of AI in Software Development report](https://about.gitlab.com/developer-survey/), developers noted spending 25% of their total work time writing code, with the rest spent improving existing code, understanding existing code, testing, maintaining code, and identifying and mitigating security vulnerabilities. As organizations incorporate AI into their DevSecOps processes, they'll need to adopt AI solutions — such as vulnerability explanations, code change summaries, automated tests, and more — that will help them introduce efficiencies and boost productivity and collaboration across the entire software development lifecycle (SDLC).\n\nWe know AI, with the efficiencies it brings across the entire SDLC, has a role to play in saving developers’ time, improving their jobs, and helping speed the production of secure software. Over the past few months, GitLab has released a set of AI-powered features that will help you do just that. 
\n> Discover the future of AI-driven software development with our GitLab 17 virtual launch event. [Watch today!](https://about.gitlab.com/seventeen/)\n\n### GitLab Duo Pro: Unleash the power of AI\n\nFor Ultimate and Premium customers, [GitLab Duo Pro](https://about.gitlab.com/pricing/) is a new add-on package that brings together Code Suggestions, GitLab Duo Chat (Beta), and organizational control capabilities, ensuring that teams can take advantage of AI exactly where they need it. \n\nCode Suggestions helps automate routine coding tasks, allowing developers to focus on delivering higher-quality software at speed, while Chat helps teams write and understand code faster, get up to speed on the status of projects, and quickly learn GitLab. And the organizational controls included in GitLab Duo Pro give organizations fine-grained control over which users have access to AI, ensuring alignment with team workflows and security requirements. At an introductory price of $9 USD per user per month (valid until January 31), and then $19 USD per user per month after February 1, GitLab Duo Pro is an investment in developer efficiency and organizational governance. [Contact us today to get started](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/).\n\n### Remove roadblocks and ship faster\n\nDevelopers struggle with repetitive tasks slowing down their workflow and hindering innovation. Manual code reviews and testing take additional time, inhibiting release cycles. And inconsistent quality standards raise security alarms and can create potential vulnerabilities. This all stifles productivity and reduces developer happiness. \n\nWith the recent general availability of [Code Suggestions](https://about.gitlab.com/blog/gitlab-duo-code-suggestions-is-generally-available/), developers of all skill levels now have a virtual assistant that makes building software faster, more efficient, and more secure. 
The AI-powered feature now has enhancements like [Code Generation](https://docs.gitlab.com/ee/user/project/repository/code_suggestions/), which generates code based on a natural language comment blocks, and [Repository X-ray](https://docs.gitlab.com/ee/user/project/repository/code_suggestions/repository_xray.html), which improves the accuracy and relevance of code recommendations. \n\nCode Suggestions is your virtual pair programming buddy that can handle repetitive tasks, giving developers more time to focus on creating innovative products. Acting as a coding sidekick, this AI capability can automatically generate lines of code, complete functions, and even extend unfamiliar codebases. Code Suggestions supports 15 languages, including C++, C#, Go, Java, JavaScript, Python, PHP, Ruby, Rust, Scala, Kotlin, and TypeScript. It also integrates seamlessly into many popular IDEs like VS Code, Visual Studio, JetBrains’ suite of IDEs, and Neovim.\n\n### Gain the power of an AI assistant\n\nWhile Code Suggestions takes the spotlight, it's just one capability of [GitLab Duo](https://about.gitlab.com/gitlab-duo/), which is a full suite of [15 AI-assisted features](https://docs.gitlab.com/ee/user/ai_features.html) that go beyond code generation, catering to every stage of software development — from code generation and testing to security analysis and documentation. [GitLab Duo Chat](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html) is the foundational technology that connects users with our broader set of AI-powered GitLab Duo features. With Chat, users have a personal AI assistant that helps them answer questions, understand code, and even generate tests — all within the same IDE where development work happens. Together, these capabilities form a cohesive ecosystem. \n\nThese features are critical because they’ll help prevent bottlenecks from forming downstream as developers, who are getting a productivity boost from Code Suggestions, create more code faster. 
GitLab Duo offers targeted solutions, like Vulnerability Explanation and Merge Request Summary, to improve software quality and keep workflows moving.\n\nImportantly, GitLab prioritizes privacy and transparency in its AI approach. Customer code is not used for training, ensuring organizations have complete data ownership and control.\n\n> Live demo! Discover the future of AI-driven software development at our GitLab 17 virtual launch event. [Register today](https://about.gitlab.com/seventeen/)!\n",[803,9,717],{"slug":869,"featured":91,"template":698},"ai-powered-growth-transform-every-stage-of-software-delivery","content:en-us:blog:ai-powered-growth-transform-every-stage-of-software-delivery.yml","Ai Powered Growth Transform Every Stage Of Software Delivery","en-us/blog/ai-powered-growth-transform-every-stage-of-software-delivery.yml","en-us/blog/ai-powered-growth-transform-every-stage-of-software-delivery",{"_path":875,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":876,"content":882,"config":889,"_id":891,"_type":14,"title":892,"_source":16,"_file":893,"_stem":894,"_extension":19},"/en-us/blog/automate-tedious-coding-tasks-with-gitlab-duo-workflow",{"title":877,"description":878,"ogTitle":877,"ogDescription":878,"noIndex":6,"ogImage":879,"ogUrl":880,"ogSiteName":685,"ogType":686,"canonicalUrls":880,"schema":881},"Automate tedious coding tasks with GitLab Duo Workflow","See how agentic AI can reduce time spent on repetitive tasks, freeing you up to focus on developing innovative solutions and shipping the next big thing.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662465/Blog/Hero%20Images/GitLab_Duo_Workflow_Unified_Data_Store__1_.png","https://about.gitlab.com/blog/automate-tedious-coding-tasks-with-gitlab-duo-workflow","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Automate tedious coding tasks with GitLab Duo Workflow\",\n        \"author\": 
[{\"@type\":\"Person\",\"name\":\"Jeff Park\"}],\n        \"datePublished\": \"2025-05-06\",\n      }",{"title":877,"description":878,"authors":883,"heroImage":879,"date":885,"body":886,"category":801,"tags":887},[884],"Jeff Park","2025-05-06","Working with large codebases often means spending significant time on repetitive tasks that, while necessary, don't really push your projects forward. The good news is that these tasks are great candidates to be completed with AI. Reducing the time spent on them will free you up to work on more important problems that you’re actually excited to tackle. With GitLab Duo Workflow, the time spent on these tasks will go from hours to minutes.\n\n[Duo Workflow](https://about.gitlab.com/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai/) is a powerful new agentic solution, currently in private beta, that lives in VS Code and is designed to help you complete complex development tasks. While many AI coding assistants are focused on helping developers write code, Duo Workflow understands your project structure, reads your files, and can make coordinated changes across your entire codebase.\n\nI created a demonstration that showcases how Duo Workflow can transform a tedious coding task into a streamlined process that saves you time and mental energy.\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1081627484?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Automate tedious coding tasks with GitLab Duo Workflow\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n## The challenge: Implementing a new lint rule\n\nIn this demo, we tackle a common scenario that many developers face: implementing a new lint rule and 
then updating multiple files across the codebase to comply with this rule. The specific issue involves validation errors occurring in several project files that need to be addressed consistently.\n\nRather than manually identifying and modifying each affected file one by one – a process that could take hours depending on the size of your codebase – we'll see how Duo Workflow can:\n\n1. Read and understand the details from an issue\n2. Analyze the project structure to identify affected files\n3. Create a comprehensive plan to implement the necessary changes\n4. Draft a new lint rule to prevent future occurrences\n5. Make consistent code changes across all relevant files\n6. Stage the changes for your review before any commits are made\n\nA simple prompt initiates the process:\n\n\"Read through issue #1 in this project and submit code changes to resolve it. Be sure to look at each tool file and make all appropriate changes.\"\n\nFrom there, Duo Workflow takes over – reading the issue, analyzing the files, creating a plan, and implementing the solution – all while keeping me informed of its progress and reasoning.\n\n## Why this matters for your development process\n\nWhat's particularly powerful about Duo Workflow is how it maintains awareness of this wider context throughout the entire process. 
It's not just making text replacements based on a large language model's training data – it's understanding the code, making intelligent decisions, and proposing a complete solution that you maintain full control over.\n\nThis approach offers several key benefits:\n\n* **Consistency in implementation:** Apply changes uniformly across files\n* **Time savings:** Focus your energy on creative problem-solving rather than repetitive tasks\n* **Reduced context switching:** Complete complex tasks without leaving your IDE\n* **Keeping a human in the loop:** Review all proposed modifications before committing\n\n## What's next\n\nGitLab Duo Workflow is part of our work to bring AI-powered capabilities to every stage of the software development lifecycle. While this demo focuses on code editing, the same approach can be applied to various development tasks:\n\n* Implementing new features based on issue descriptions\n* Fixing bugs with comprehensive test coverage\n* Refactoring legacy code to modern standards\n* Creating documentation from codebase analysis\n\nWe believe that by automating repetitive tasks, Duo Workflow helps you focus on what matters most – solving interesting problems and creating innovative solutions for your users.\n\n> GitLab Duo Workflow is currently available in private beta for GitLab Ultimate customers. 
[Sign up for the waitlist today!](https://about.gitlab.com/gitlab-duo/agent-platform/)\n\n## Learn more\n- [Use GitLab Duo Workflow to improve application quality assurance](https://about.gitlab.com/blog/use-gitlab-duo-workflow-to-improve-application-quality-assurance/)\n- [Solving complex challenges with GitLab Duo Workflow](https://about.gitlab.com/blog/solving-complex-challenges-with-gitlab-duo-workflow/)\n- [GitLab Duo Workflow: Enterprise visibility and control for agentic AI](https://about.gitlab.com/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai/)\n- [Emerging agentic AI trends reshaping software development](https://about.gitlab.com/the-source/ai/emerging-agentic-ai-trends-reshaping-software-development/)\n- [What is agentic AI?](https://about.gitlab.com/topics/agentic-ai/)\n",[803,9,695,717,719,888],"workflow",{"slug":890,"featured":91,"template":698},"automate-tedious-coding-tasks-with-gitlab-duo-workflow","content:en-us:blog:automate-tedious-coding-tasks-with-gitlab-duo-workflow.yml","Automate Tedious Coding Tasks With Gitlab Duo Workflow","en-us/blog/automate-tedious-coding-tasks-with-gitlab-duo-workflow.yml","en-us/blog/automate-tedious-coding-tasks-with-gitlab-duo-workflow",{"_path":896,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":897,"content":903,"config":909,"_id":911,"_type":14,"title":912,"_source":16,"_file":913,"_stem":914,"_extension":19},"/en-us/blog/automating-agile-workflows-with-the-gitlab-triage-gem",{"title":898,"description":899,"ogTitle":898,"ogDescription":899,"noIndex":6,"ogImage":900,"ogUrl":901,"ogSiteName":685,"ogType":686,"canonicalUrls":901,"schema":902},"Automating Agile workflows with the gitlab-triage gem","Learn how to automate repetitive tasks like triaging issues and merge requests to free up valuable developer time in our \"Getting Started with GitLab\" 
series.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659525/Blog/Hero%20Images/blog-getting-started-with-gitlab-banner-0497-option4-fy25.png","https://about.gitlab.com/blog/automating-agile-workflows-with-the-gitlab-triage-gem","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Automating Agile workflows with the gitlab-triage gem\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"GitLab\"}],\n        \"datePublished\": \"2025-03-13\",\n      }",{"title":898,"description":899,"authors":904,"heroImage":900,"date":905,"body":906,"category":717,"tags":907},[822],"2025-03-13","*Welcome to our \"Getting started with GitLab\" series, where we help\nnewcomers get familiar with the GitLab DevSecOps platform.*\n\n\nThis post dives into the\n[`gitlab-triage`](https://gitlab.com/gitlab-org/ruby/gems/gitlab-triage)\ngem, a powerful tool that lets you create bots to automate your Agile\nworkflow. Say goodbye to manual tasks and hello to streamlined efficiency.\n\n\n## Why automate your workflow?\n\n\nEfficiency is key in software development. Automating repetitive tasks like\ntriaging issues and merge requests frees up valuable time for your team to\nfocus on what matters most: building amazing software.\n\n\nWith `gitlab-triage`, you can:\n\n\n* **Ensure consistency:** Apply labels and assign issues automatically based\non predefined rules.  \n\n* **Improve response times:** Get immediate feedback on new issues and merge\nrequests.  \n\n* **Reduce manual effort:** Eliminate the need for manual triage and\nupdates.  \n\n* **Boost productivity:** Free up your team to focus on coding and\ninnovation.\n\n\n## Introducing the `gitlab-triage` gem\n\n\nThe `gitlab-triage` gem is a Ruby library that allows you to create bots\nthat interact with your GitLab projects. 
These bots can automatically\nperform a wide range of actions, including:\n\n\n* **Labeling:** Automatically categorize issues and merge requests.  \n\n* **Commenting:** Provide updates, request information, or give feedback.  \n\n* **Assigning:** Assign issues and merge requests to the appropriate team\nmembers.  \n\n* **Closing:** Close stale or resolved issues and merge requests.  \n\n* **Creating:** Generate new issues based on specific events or\nconditions.  \n\n* **And much more!**\n\n\nCheck out the [`gitlab-triage` gem\nrepository](https://gitlab.com/gitlab-org/ruby/gems/gitlab-triage). \n\n\n## Setting up your triage bot\n\n\nLet's get your first triage bot up and running!\n\n\n1. Install the gem. (Note: The gem command is available with Ruby\nprogramming language installed.)\n\n\n```bash\n\ngem install gitlab-triage\n\n```\n\n\n2. Get your GitLab API token.\n\n\n* Go to your GitLab [profile\nsettings](https://gitlab.com/-/profile/preferences).  \n\n* Navigate to **Access Tokens**.  \n\n* Create a new token with the `api` scope.  \n\n* **Keep your token secure and set an expiration date for it based on when\nyou will be done with this walkthrough!**\n\n\n3. Define your triage policies.\n\n\nCreate a file named `.triage-policies.yml` in your project's root directory.\nThis file will contain the rules that govern your bot's behavior. Here's a\nsimple example:\n\n\n```yaml\n\n\n---\n\n- name: \"Apply 'WIP' label\"\n  condition:\n    draft: true\n  action:\n    labels:\n      - status::wip\n\n- name: \"Request more information on old issue\"\n  condition:\n   date:\n    attribute: updated_at\n    condition: older_than\n    interval_type: months\n    interval: 12\n  action:\n    comment: |\n      {{author}} This issue has been open for more than 12 months, is this still an issue?\n```\n\n\nThis configuration defines two policies:\n\n\n* The first policy applies the `status::wip` label to any issue that is in\ndraft.  
\n\n* The second policy adds a comment to an issue that the issue has not been\nupdated in 12 months.\n\n\n4. Run your bot.\n\n\nYou can run your bot manually using the following command:\n\n\n```bash\n\ngitlab-triage -t \u003Cyour_api_token> -p \u003Cyour_project_id>\n\n```\n\n\nReplace `\u003Cyour_api_token>` with your GitLab API token and\n`\u003Cyour_project_id>` with the [ID of your GitLab\nproject](https://docs.gitlab.com/user/project/working_with_projects/#access-a-project-by-using-the-project-id).\nIf you would like to see the impact of actions before they are taken, you\ncan add the `-n` or `--dry-run` to test out the policies first.\n\n\n## Automating with GitLab CI/CD\n\n\nTo automate the execution of your triage bot, integrate it with [GitLab\nCI/CD](https://about.gitlab.com/blog/ultimate-guide-to-ci-cd-fundamentals-to-advanced-implementation/).\nHere's an example `.gitlab-ci.yml` configuration:\n\n\n```yaml\n\n\ntriage:\n  script:\n    - gem install gitlab-triage\n    - gitlab-triage -t $GITLAB_TOKEN -p $CI_PROJECT_ID\n  only:\n    - schedules\n```\n\n\nThis configuration defines a job named \"triage\" that installs the\n`gitlab-triage` gem and runs the bot using the `$GITLAB_TOKEN` (a predefined\n[CI/CD variable](https://docs.gitlab.com/ci/variables/)) and the\n`$CI_PROJECT_ID` variable. The `only: schedules` clause ensures that the job\nruns only on a schedule.\n\n\nTo create a\n[schedule](https://docs.gitlab.com/ee/ci/pipelines/schedules.html), go to\nyour project's **CI/CD** settings and navigate to **Schedules**. Create a\nnew schedule and define the frequency at which you want your bot to run\n(e.g., daily, hourly).\n\n\n## Advanced triage policies\n\n\n`gitlab-triage` offers a range of advanced features for creating more\ncomplex triage policies:\n\n\n* **Regular expressions:** Use regular expressions for more powerful pattern\nmatching.  \n\n* **Summary policies:** Consolidate related issues into a single summary\nissue.  
\n\n* **Custom actions:** Define custom actions using [Ruby code\nblocks](https://gitlab.com/gitlab-org/ruby/gems/gitlab-triage#can-i-customize)\nto perform more complex operations using the GitLab API.\n\n\nHere are two advanced real-world examples from the triage bot used by the\nDeveloper Advocacy team at GitLab. You can view the full policies in [this\nfile](https://gitlab.com/gitlab-da/projects/devrel-bot/-/blob/master/.triage-policies.yml?ref_type=heads).\n\n\n```yaml\n\n- name: Issues where DA team member is an assignee outside DA-Meta project\ni.e. DevRel-Influenced\n  conditions:\n    assignee_member:\n      source: group\n      condition: member_of\n      source_id: 1008\n    state: opened\n    ruby: get_project_id != 18 \n    forbidden_labels:\n      - developer-advocacy\n  actions:   \n    labels:\n      - developer-advocacy\n      - DevRel-Influenced\n      - DA-Bot::Skip\n```\n\n\nThis example for issues across a group, excluding those in the project with\nthe ID of 18, have assignees who are members of the group with ID of 1008\nand do not have the label `developer-advocacy` on them. This policy helps\nthe Developer Advocacy team at GitLab to find issues members of the team are\nassigned to but are not in their team’s project. This helps the team\nidentify and keep track of contributions made outside of the team by adding\nthe teams’ labels.\n\n\n```\n\n- name: Missing Due Dates\n  conditions:\n    ruby: missing_due_date\n    state: opened\n    labels:\n      - developer-advocacy\n    forbidden_labels:\n      - DA-Due::N/A\n      - DA-Bot::Skip\n      - DA-Status::FYI\n      - DA-Status::OnHold\n      - CFP\n      - DA-Bot::Triage\n  actions:\n    labels:\n      - DA-Bot-Auto-Due-Date\n    comment: |\n      /due #{get_current_quarter_last_date}\n```\n\n\nThis second example checks for all issues with the `developer-advocacy`\nlabel, which do not include labels in the forbidden labels list and when\ntheir due dates have passed. 
It updates the due dates automatically by\ncommenting on the issue with a slash command and a date that is generated\nusing Ruby.\n\n\nThe Ruby scripts used in the policies are defined in a separate file as\nshown below. This feature allows you to be flexible in working with your\nfilters and actions. You can see functions are created for different Ruby\ncommands that we used in our policies. \n\n\n```\n\nrequire 'json'\n\nrequire 'date'\n\nrequire \"faraday\"\n\nrequire 'dotenv/load'\n\n\nmodule DATriagePlugin\n  def last_comment_at\n    conn = Faraday.new(\n      url: notes_url+\"?sort=desc&order_by=created_at&pagination=keyset&per_page=1\",\n      headers: {'PRIVATE-TOKEN' => ENV.fetch(\"PRIV_KEY\"), 'Content-Type' => 'application/json' }\n    )\n\n    response = conn.get()\n    if response.status == 200\n      jsonData = JSON.parse(response.body)\n      if jsonData.length > 0\n        Date.parse(jsonData[0]['created_at'])\n      else\n        Date.parse(resource[:created_at])\n      end\n    else\n      Date.parse(resource[:created_at])\n    end\n  end\n\n  def notes_url\n    resource[:_links][:notes]\n  end\n\n  def get_project_id\n    resource[:project_id]\n  end\n\n  def get_current_quarter_last_date()\n    yr = Time.now.year\n    case Time.now.month\n    when 2..4\n      lm = 4\n    when 5..7\n      lm = 7\n    when 8..10\n      lm = 10\n    when 11..12\n      lm = 1\n      yr = yr + 1\n    else\n      lm = 1    \n    end\n\n    return Date.new(yr, lm, -1) \n  end\n\n  def one_week_to_due_date\n    if(resource[:due_date] == nil)\n      false\n    else\n      days_to_due = (Date.parse(resource[:due_date]) - Date.today).to_i\n      if(days_to_due > 0 && days_to_due \u003C 7)\n        true\n      else\n        false\n      end\n    end\n  end\n\n  def due_date_past\n    if(resource[:due_date] == nil)\n      false\n    else\n      Date.today > Date.parse(resource[:due_date])\n    end\n  end\n\n  def missing_due_date\n    if(resource[:due_date] == nil)\n      
true\n    else\n      false\n    end\n  end\n\nend\n\n\nGitlab::Triage::Resource::Context.include DATriagePlugin\n\n\n```\n\nThe triage bot is executed using the command:\n\n\n``` \n\n`gitlab-triage -r ./triage_bot/issue_triage_plugin.rb --debug --token\n$PRIV_KEY --source-id gitlab-com --source groups`  \n\n```\n\n\n- `-r`: Passes in a  file of requirements for the performing triage. In this\ncase we are passing in our Ruby functions.  \n\n- `--debug`: Prints debugging information as part of the output.  \n\n- `--token`: Is used to pass in a valid GitLab API token.  \n\n- `--source`: Specifies if the sources of the issues it will search is\nwithin a group or a project.  \n\n- `--source-id`: Takes in the ID of the selected source type – in this case,\na group.\n\n\nThe GitLab [triage-ops](https://gitlab.com/gitlab-org/quality/triage-ops)\nproject is another real-world example that is more complex and you can learn\nhow to build your own triage bot.\n\n\n## Best practices\n\n\n* **Start simple:** Begin with basic policies and gradually increase\ncomplexity as needed. \n\n* **Test thoroughly:** Test your policies in a staging environment before\ndeploying them to production.  \n\n* **Monitor regularly:** Monitor your bot's activity to ensure it's behaving\nas expected. \n\n* **Use descriptive names:** Give your policies clear and descriptive names\nfor easy maintenance. \n\n* **Be mindful of the scope of your filters:** You might be tempted to\nfilter issues across groups where thousands of issues exist. However, this\ncan slow down the triage and also make the process fail due to rate\nlimitations against the GitLab API.  \n\n* **Prioritize using labels for triages:** To avoid spamming other users,\nlabels are a good way to perform triages without cluttering comments and\nissues.\n\n\n## Take control of your workflow\n\n\nWith the `gitlab-triage` gem, you can automate your GitLab workflow and\nunlock new levels of efficiency. 
Start by creating simple triage bots and\ngradually explore the more advanced features. You'll be amazed at how much\ntime and effort you can save\\!\n\n\n> #### Want to take your learning to the next level? [Sign up for GitLab\nUniversity courses](https://university.gitlab.com/). Or you can get going\nright away with a [free trial of GitLab\nUltimate](https://about.gitlab.com/free-trial/).\n\n\n## \"Getting started with GitLab\" series\n\nRead more articles in our \"Getting started with GitLab\" series:\n\n\n- [How to manage\nusers](https://about.gitlab.com/blog/getting-started-with-gitlab-how-to-manage-users/)\n\n- [How to import your projects to\nGitLab](https://about.gitlab.com/blog/getting-started-with-gitlab-how-to-import-your-projects-to-gitlab/)  \n\n- [Mastering project\nmanagement](https://about.gitlab.com/blog/getting-started-with-gitlab-mastering-project-management/)\n\n- [Understanding\nCI/CD](https://about.gitlab.com/blog/getting-started-with-gitlab-understanding-ci-cd/)\n\n- [Working with CI/CD\nvariables](https://about.gitlab.com/blog/getting-started-with-gitlab-working-with-ci-cd-variables/)\n",[9,719,717,908,109],"agile",{"slug":910,"featured":6,"template":698},"automating-agile-workflows-with-the-gitlab-triage-gem","content:en-us:blog:automating-agile-workflows-with-the-gitlab-triage-gem.yml","Automating Agile Workflows With The Gitlab Triage Gem","en-us/blog/automating-agile-workflows-with-the-gitlab-triage-gem.yml","en-us/blog/automating-agile-workflows-with-the-gitlab-triage-gem",{"_path":916,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":917,"content":923,"config":931,"_id":933,"_type":14,"title":934,"_source":16,"_file":935,"_stem":936,"_extension":19},"/en-us/blog/automating-container-image-migration-from-amazon-ecr-to-gitlab",{"title":918,"description":919,"ogTitle":918,"ogDescription":919,"noIndex":6,"ogImage":920,"ogUrl":921,"ogSiteName":685,"ogType":686,"canonicalUrls":921,"schema":922},"Automating container image migration from 
Amazon ECR to GitLab","When platform teams move their CI/CD to GitLab, migrating container images shouldn't be the bottleneck. Follow this step-by-step guide to automate the pipeline migration process.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663129/Blog/Hero%20Images/blog-image-template-1800x945__28_.png","https://about.gitlab.com/blog/automating-container-image-migration-from-amazon-ecr-to-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Automating container image migration from Amazon ECR to GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Tim Rizzi\"}],\n        \"datePublished\": \"2025-02-13\",\n      }",{"title":918,"description":919,"authors":924,"heroImage":920,"date":926,"body":927,"category":928,"tags":929},[925],"Tim Rizzi","2025-02-13","\"We need to migrate hundreds of container images from Amazon Elastic\nContainer Registry (ECR) to GitLab. Can you help?\" This question kept coming\nup in conversations with platform engineers. They were modernizing their\nDevSecOps toolchain with GitLab but got stuck when faced with moving their\ncontainer images. While each image transfer is simple, the sheer volume made\nit daunting.\n\n\nOne platform engineer perfectly said, \"I know exactly what needs to be done\n– pull, retag, push. But I have 200 microservices, each with multiple tags.\nI can't justify spending weeks on this migration when I have critical\ninfrastructure work.\"\n\n\n## The challenge\n\n\nThat conversation sparked an idea. What if we could automate the entire\nprocess? When platform teams move their\n[CI/CD](https://about.gitlab.com/topics/ci-cd/) to GitLab, migrating\ncontainer images shouldn't be the bottleneck. The manual process is\nstraightforward but repetitive – pull each image, retag it, and push it to\nGitLab's Container Registry. 
Multiply this by dozens of repositories and\nmultiple tags per image, and you're looking at days or weeks of tedious\nwork.\n\n\n## The solution\n\n\nWe set out to create a GitLab pipeline that would automatically do all this\nheavy lifting. The goal was simple: Give platform engineers a tool they\ncould set up in minutes and let run overnight, waking up to find all their\nimages migrated successfully.\n\n\n### Setting up access\n\n\nFirst things first – security. We wanted to ensure teams could run this\nmigration with minimal AWS permissions. Here's the read-only identity and\naccess management (IAM) policy you'll need:\n\n\n```json\n\n{\n    \"Version\": \"2012-10-17\",\n    \"Statement\": [\n        {\n            \"Effect\": \"Allow\",\n            \"Action\": [\n                \"ecr:GetAuthorizationToken\",\n                \"ecr:BatchCheckLayerAvailability\",\n                \"ecr:GetDownloadUrlForLayer\",\n                \"ecr:DescribeRepositories\",\n                \"ecr:ListImages\",\n                \"ecr:DescribeImages\",\n                \"ecr:BatchGetImage\"\n            ],\n            \"Resource\": \"*\"\n        }\n    ]\n}\n\n```\n\n\n### GitLab configuration\n\n\nWith security handled, the next step is setting up GitLab. We kept this\nminimal - you'll need to configure these variables in your CI/CD settings:\n\n\n```\n\nAWS_ACCOUNT_ID: Your AWS account number\n\nAWS_DEFAULT_REGION: Your ECR region\n\nAWS_ACCESS_KEY_ID: [Masked]\n\nAWS_SECRET_ACCESS_KEY: [Masked]\n\nBULK_MIGRATE: true\n\n```\n\n\n### The migration pipeline\n\n\nNow for the interesting part. 
We built the pipeline using Docker-in-Docker\nto handle all the image operations reliably:\n\n\n```yaml\n\nimage: docker:20.10\n\nservices:\n  - docker:20.10-dind\n\nbefore_script:\n  - apk add --no-cache aws-cli jq\n  - aws sts get-caller-identity\n  - aws ecr get-login-password | docker login --username AWS --password-stdin\n  - docker login -u ${CI_REGISTRY_USER} -p ${CI_REGISTRY_PASSWORD} ${CI_REGISTRY}\n```\n\n\nThe pipeline works in three phases, each building on the last:\n\n\n1. Discovery\n\n\nFirst, it finds all your repositories:\n\n\n```bash\n\nREPOS=$(aws ecr describe-repositories --query\n'repositories[*].repositoryName' --output text)\n\n```\n\n\n2. Tag enumeration\n\n\nThen, for each repository, it gets all the tags:\n\n\n```bash\n\nTAGS=$(aws ecr describe-images --repository-name $repo --query\n'imageDetails[*].imageTags[]' --output text)\n\n```\n\n\n3. Transfer\n\n\nFinally, it handles the actual migration:\n\n\n```bash\n\ndocker pull\n${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_DEFAULT_REGION}.amazonaws.com/${repo}:${tag}\n\ndocker tag\n${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_DEFAULT_REGION}.amazonaws.com/${repo}:${tag}\n${CI_REGISTRY_IMAGE}/${repo}:${tag}\n\ndocker push ${CI_REGISTRY_IMAGE}/${repo}:${tag}\n\n```\n\n\n## What you get\n\n\nRemember that platform engineer who didn't want to spend weeks on migration?\nHere's what this solution delivers:\n\n\n- automated discovery and migration of all repositories and tags\n\n- consistent image naming between ECR and GitLab\n\n- error handling for failed transfers\n\n- clear logging for tracking progress\n\n\nInstead of writing scripts and babysitting the migration, the platform\nengineer could focus on more valuable work.\n\n\n## Usage\n\n\nGetting started is straightforward:\n\n\n1. Copy the `.gitlab-ci.yml` to your repository.\n\n2. Configure the AWS and GitLab variables.\n\n3. 
Set `BULK_MIGRATE` to \"true\" to start the migration.\n\n\n## Best practices\n\n\nThrough helping teams with their migrations, we've learned a few things:\n\n\n- Run during off-peak hours to minimize the impact on your team.\n\n- Keep an eye on the pipeline logs - they'll tell you if anything needs\nattention.\n\n- Don't decommission ECR until you've verified all images transferred\nsuccessfully.\n\n- For very large migrations, consider adding rate limiting to avoid\noverwhelming your network\n\n\nWe've open-sourced this pipeline in our public GitLab repository because we\nbelieve platform engineers should spend time building valuable\ninfrastructure, not copying container images. Feel free to adapt it for your\nneeds or ask questions about implementation.\n\n\n> #### Get started with this and other package components with our [CI/CD\nCatalog\ndocumentation](https://gitlab.com/explore/catalog/components/package).\n","engineering",[109,805,719,9,717,930],"solutions architecture",{"slug":932,"featured":91,"template":698},"automating-container-image-migration-from-amazon-ecr-to-gitlab","content:en-us:blog:automating-container-image-migration-from-amazon-ecr-to-gitlab.yml","Automating Container Image Migration From Amazon Ecr To Gitlab","en-us/blog/automating-container-image-migration-from-amazon-ecr-to-gitlab.yml","en-us/blog/automating-container-image-migration-from-amazon-ecr-to-gitlab",{"_path":938,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":939,"content":945,"config":952,"_id":954,"_type":14,"title":955,"_source":16,"_file":956,"_stem":957,"_extension":19},"/en-us/blog/automating-cybersecurity-threat-detections-with-gitlab-ci-cd",{"title":940,"description":941,"ogTitle":940,"ogDescription":941,"noIndex":6,"ogImage":942,"ogUrl":943,"ogSiteName":685,"ogType":686,"canonicalUrls":943,"schema":944},"Automating cybersecurity threat detections with GitLab CI/CD","Discover how GUARD automates cybersecurity threat detections through the use\nof GitLab CI/CD and 
how it ensures high-quality detections.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663239/Blog/Hero%20Images/AdobeStock_1023776629.jpg","https://about.gitlab.com/blog/automating-cybersecurity-threat-detections-with-gitlab-ci-cd","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Automating cybersecurity threat detections with GitLab CI/CD\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Mitra Jozenazemian\"}],\n        \"datePublished\": \"2025-01-29\",\n      }",{"title":940,"description":941,"authors":946,"heroImage":942,"date":948,"body":949,"category":693,"tags":950},[947],"Mitra Jozenazemian","2025-01-29","*This blog post is the second post in a series about [GitLab Universal\nAutomated Response and Detection\n(GUARD)](https://about.gitlab.com/blog/unveiling-the-guard-framework-to-automate-security-detections-at-gitlab/).*\n\n\nWriting and deploying security threat detections in an organization’s\nsecurity information event management platform (SIEM) is a critical\ncomponent of a successful cybersecurity program. Moving from manual\ndetection engineering to a fully automated process by implementing\nDetections as Code (DaC) ensures detection consistency, quality, auditing,\nand automated testing. At GitLab, we’ve embedded DaC capabilities into\nGUARD, our fully automated detection and response framework. \n\n\n## The problem: Source control and automated tests\n\n\nThe [Signals\nEngineering](https://handbook.gitlab.com/handbook/security/security-operations/signals-engineering/)\nand\n[SIRT](https://handbook.gitlab.com/handbook/security/security-operations/sirt/)\nteam at GitLab share the responsibility to create, update, and decommission\nthreat detections in our SIEM. Maintaining a single source of truth for\ndetections is critical to ensure detection consistency and quality standards\nare met. 
Our teams made the conscious decision to abstract the detection\ncreation process from our SIEM, improving our issue tracking, consistency,\nroll-back process, and metrics. Additionally, conducting pre-commit\ndetection tests outside of our SIEM ensured that newly created detections\ndidn’t introduce overly false positive heavy alerts, which would require\ntuning or disablement while the alert was fixed. \n\n\n## The Solution: Leverage GitLab CI/CD for detection testing and validation\n\n\nTo address these challenges, we developed an efficient workflow using GitLab\n[CI/CD](https://about.gitlab.com/topics/ci-cd/), resulting in a streamlined\nand secure SIEM detection deployment process.\n\n\n### Key components of the GUARD DaC pipeline \n\n\n__1. Detections stored in JSON format in a GitLab project__\n\n\nGitLab uses the JSON format for our threat detections. The template includes\nessential information such as SIEM query logic, detection title, and\ndescription along with runbook page link, MITRE tactic and technique related\nto the detection, and other necessary details.\n\n\n__2. Initiating merge requests__\n\n\nWhen a GitLab team member intends to create a new threat detection, update\nan existing one, or delete a current detection, they initiate the process by\nsubmitting a merge request (MR) in the DaC project containing the detection\nJSON template. Creating the MR automatically triggers a CI/CD pipeline.\n\n\n__3. 
Automated validation with CI/CD jobs__\n\n\nEach MR contains a number of automated checks via GitLab CI/CD:   \n\n* Query format validation queries SIEM API to ensure detection query is\nvalid  \n\n* JSON Detection fields validation validates all required fields are\npresent, and are in the correct format   \n\n* New detections and detection modification trigger a number of SIEM API\ncalls to ensure the detection does not have any errors and that no issues\nwill be introduced into our production detection rules   \n\n* Detection deletion MRs trigger the pipeline to issue a SIEM API query to\nensure the detection to be deleted is still active and can be deleted \n\n\n__4. Peer review and approval__\n\n\nWhen a detection MR job completes successfully, a peer review is required to\nreview and confirm the MR meets required quality and content standards\nbefore the detection MR can be merged. [Merge request approval\nrules](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/rules.html)\nare used to trigger the peer review process. \n\n\n__5. Merge and final deployment__\n\n\nAfter the MR is approved, it is merged into the main branch. As part of the\nCI/CD pipeline, an automated job executes a SIEM API command in order to\nperform two tasks:   \n\n* Create the new detection or update/delete the existing detection if\nneeded.   \n\n* Extract the MITRE ATT&CK tactic and technique information related to the\nalert from the JSON files and transmit these details to a lookup table\nwithin the SIEM. 
This lookup table plays an important role in mapping our\nalerts to MITRE tactics and techniques, helping us improve our threat\nanalysis and identify gaps in our detection capabilities in alignment with\nthe MITRE framework.\n\n\n**Note:** The necessary credentials for these actions are securely stored in\n[CI/CD variables](https://docs.gitlab.com/ee/ci/variables/) to ensure the\nprocess remains confidential and secure.\n\n\nBelow is a template GitLab CI/CD `gitlab-ci.yml` configuration file for a\nDaC pipeline: \n\n\n```\n\n\n#\n---------------------------------------------------------------------------\n#\n\n# GitLab CI/CD Pipeline for SIEM Detection Management\n\n#\n---------------------------------------------------------------------------\n#\n\n\nimage: python:3.12\n\n\n#\n---------------------------------------------------------------------------\n#\n\n# Global Configuration\n\n#\n---------------------------------------------------------------------------\n#\n\n\nbefore_script:\n  - apt-get update && apt-get install -y jq\n  - pip install --upgrade pip\n  - pip install -r requirements.txt\n\n#\n---------------------------------------------------------------------------\n#\n\n\nstages:\n  - fetch\n  - test\n  - process\n  - upload\n\n#\n---------------------------------------------------------------------------\n#\n\n# Fetch Stage\n\n#\n---------------------------------------------------------------------------\n#\n\n\nfetch_changed_files:\n  stage: fetch\n  Script:\n    - echo \"Fetching changed files...\"\n    - git branch\n    - git fetch origin $CI_DEFAULT_BRANCH:$CI_DEFAULT_BRANCH --depth 2000\n    - |\n      if [[ \"$CI_COMMIT_BRANCH\" == \"$CI_DEFAULT_BRANCH\" ]]; then\n        git diff --name-status HEAD^1...HEAD > changed-files-temp.txt\n      else\n        git fetch origin $CI_COMMIT_BRANCH:$CI_COMMIT_BRANCH --depth 2000\n        git diff --name-status ${CI_DEFAULT_BRANCH}...${CI_COMMIT_SHA} > changed-files-temp.txt\n      fi\n    - grep -E 
'\\.json$' changed-files-temp.txt > changed-files.txt || true\n    - flake8 .\n    - pytest\n  artifacts:\n    paths:\n      - changed-files.txt\n    expose_as: 'changed_files'\n\n#\n---------------------------------------------------------------------------\n#\n\n# Test Stage\n\n#\n---------------------------------------------------------------------------\n#\n\n\nflake8:\n  stage: test\n  script:\n    - echo \"Running Flake8 for linting...\"\n    - flake8 .\n\npytest:\n  stage: test\n  script:\n    - echo \"Running Pytest for unit tests...\"\n    - pytest\n  artifacts:\n    when: always\n    reports:\n      junit: report.xml\n\n#\n---------------------------------------------------------------------------\n#\n\n# Process Stage\n\n#\n---------------------------------------------------------------------------\n#\n\n\nprocess_files:\n  stage: process\n  script:\n    - echo \"Processing changed files...\"\n    - git clone --depth 2000 --branch $CI_DEFAULT_BRANCH $CI_REPOSITORY_URL\n    - mkdir -p modified_rules delete_file new_file\n    - python3 move-files.py -x changed-files.txt\n    - python3 check-alerts-format.py\n  artifacts:\n    paths:\n      - modified_rules\n      - delete_file\n      - new_file\n#\n---------------------------------------------------------------------------\n#\n\n# Upload Stage\n\n#\n---------------------------------------------------------------------------\n#\n\n\nupdate_rules:\n  stage: upload\n  script:\n    - echo \"Uploading updated rules and lookup tables...\"\n    - git fetch origin $CI_DEFAULT_BRANCH:$CI_DEFAULT_BRANCH --depth 2000\n    - git clone --depth 2000 --branch $CI_DEFAULT_BRANCH $CI_REPOSITORY_URL \n    - python3 update-rules.py\n    - python3 update-exceptions.py\n    - python3 create_ttps_layers.py\n  rules:\n    - if: $CI_COMMIT_BRANCH == \"main\" && $CI_PIPELINE_SOURCE != \"schedule\"\n      changes:\n        - detections/**/*\n        - exceptions/**/*\n```\n\n\nThe diagram below illustrates the workflow of the CI/CD 
process described\nabove.\n\n\n```mermaid\n\ngraph TD;\n    fetch[Fetch Stage: Identify Changed Files] --> test[Test Stage: Run Linting and Tests];\n    test --> process[Process Stage: Categorize Files];\n    process --> upload[Upload Stage: Update Rules and Lookup Tables];\n    fetch --> fetch_details[Details: Filter JSON files, Output 'changed-files.txt'];\n    test --> test_details[Details: Run Flake8 for linting, Pytest for testing];\n    process --> process_details[Details: Categorize into 'modified', 'new', 'deleted', Prepare for upload];\n    upload --> upload_details[Details: Update repo, Update detections in SIEM and SIEM lookup table];\n```\n\n\n## Benefits and outcomes\n\n\nAutomating our detections lifecycle through a DaC CI/CD-powered workflow\nintroduces numerous benefits to our threat detection deployment process:\n\n\n* Automation: Automating the creation and validation of SIEM detections\nreduces manual errors and saves time.\n\n* Enhanced security: The CI-driven workflow enforces a \"least privilege\"\npolicy, ensuring consistency, peer reviews, and quality standards for\ncreating, updating, or deleting threat detections. \n\n* Efficiency: The standardized JSON detection format and automated creation\nexpedite the deployment process.\n\n* Collaboration: The MR and review process fosters collaboration and\nknowledge sharing among GitLab team members.\n\n* Version control: Treating threat detection as code abstracts the\ndetections from the SIEM platform they are ultimately stored in. This\nabstraction provides a historical record of changes, facilitates\ncollaboration, and enables rollbacks to previous configurations if issues\narise.\n\n\n## Get started with DaC\n\n\nUsing GitLab CI/CD and a \"least privilege\" policy has made our SIEM\ndetection and alert management easier and more secure. Automation has\nimproved efficiency and reduced risks, providing a helpful example for\nothers wanting to improve their security and compliance. 
You can try this\ntutorial by signing up for a [free trial of GitLab\nUltimate](https://about.gitlab.com/free-trial/).",[693,719,951,9,109],"DevSecOps",{"slug":953,"featured":6,"template":698},"automating-cybersecurity-threat-detections-with-gitlab-ci-cd","content:en-us:blog:automating-cybersecurity-threat-detections-with-gitlab-ci-cd.yml","Automating Cybersecurity Threat Detections With Gitlab Ci Cd","en-us/blog/automating-cybersecurity-threat-detections-with-gitlab-ci-cd.yml","en-us/blog/automating-cybersecurity-threat-detections-with-gitlab-ci-cd",{"_path":959,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":960,"content":966,"config":973,"_id":975,"_type":14,"title":976,"_source":16,"_file":977,"_stem":978,"_extension":19},"/en-us/blog/automating-with-gitlab-duo-part-1-generating-tests",{"title":961,"description":962,"ogTitle":961,"ogDescription":962,"noIndex":6,"ogImage":963,"ogUrl":964,"ogSiteName":685,"ogType":686,"canonicalUrls":964,"schema":965},"Automating with GitLab Duo, Part 1: Generating tests","Learn how we used the AI-driven DevSecOps platform to generate automated tests and improve our development speed and quality.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097480/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%284%29_3LZkiDjHLjhqEkvOvBsVKp_1750097480784.png","https://about.gitlab.com/blog/automating-with-gitlab-duo-part-1-generating-tests","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Automating with GitLab Duo, Part 1: Generating tests\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Byron Boots\"}],\n        \"datePublished\": \"2024-12-02\",\n      }",{"title":961,"description":962,"authors":967,"heroImage":963,"date":969,"body":970,"category":801,"tags":971},[968],"Byron Boots","2024-12-02","Automated testing is time-consuming and can feel like it’s not moving a project forward. 
However, as many developers have likely experienced, automated testing provides an overall positive return on investment. In building a custom module (we'll call it gitlab-helper for this article), this was particularly true.\n\nOur initial development focused on migrating tried and used functionality from existing scripts to a new module whose sole purpose was to serve as a baseline for future functionality. Although existing scripts lacked automated testing, their consistent usage was strong anecdotal evidence the functionality worked as expected.\n\nOur objective was to deliver a more mature solution to this problem, so automated testing became a necessity. This introduced the challenge of building efficiently, while balancing the time to test and ensure a robust product; and with a total of three team members, this was no small bottleneck. Therefore, the team decided to take advantage of [GitLab Duo](https://about.gitlab.com/gitlab-duo/), our suite of AI capabilities, for test generation, improving speed and quality of the delivered product.\n\nIn this three-part series on automating with GitLab Duo, we will cover:\n\n1. How we used GitLab Duo to generate tests for our code  \n2. How we worked interactively with GitLab Duo for more complex situations  \n3. The results we were able to achieve (Spoiler: 1 developer + GitLab Duo = 84% coverage in 2 days)\n\n## Using GitLab Duo to generate tests for code\n\nWhile functionality is available across tools, this article will cover using GitLab Duo in VS Code, with the [GitLab Workflow extension for VS Code](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow) to generate tests. Links to other GitLab Duo options are available in the [references](#references) below.\n\n### Install and enable GitLab Duo\n\nAs a prerequisite to using GitLab Duo, we ensured we had a GitLab Duo-enabled account. 
If you don't have GitLab Duo, you can [sign up for a free trial](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/?type=free-trial).\n\nTo use GitLab Duo Chat in VS Code, we followed the [instructions for installation](https://docs.gitlab.com/ee/user/gitlab_duo_chat/#use-gitlab-duo-chat-in-vs-code). Then, we were able to see the GitLab Duo Chat extension on the sidebar and open the Chat window.\n\n![Ask a question window](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097489/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750097488918.png)\n\n### Generate tests with Chat\n\ngitlab-helper is a custom module built for standardizing interaction with the GitLab API across the team's work and extends other library functionalities to simplify development and scripting work. Once a method or feature was migrated to gitlab-helper and appeared to be implemented appropriately, the process to generate tests for it was simple:\n- Select the method, class, or entire file in the IDE.\n- Right-click on the selected code.\n- Under **GitLab Duo Chat**, select **Generate tests**.\n\n![Sequence to generate tests, including drop-down for generate tests](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097489/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097488919.png)\n\nWithin a few seconds, tests were generated and presented in the GitLab Duo Chat window. These tests can be reviewed and or added to the codebase, via copy/paste, into existing or new test files. As is the case with most natural language processing generations today, particularly around context, some of the initial tests created by GitLab Duo failed, thus requiring finetuning (for instance, when dealing with nested dependencies).\n\n> **Pro tip:** GitLab Duo does not auto-create files to add generated tests to. 
We found it was helpful to create new test files and add a `# Tests Generated by Duo` comment at the top of them and suffix them with `_duo.py` to indicate where the tests came from.\n\nGitLab Duo provided a great starting point for building out gitlab-helper’s automated testing and greatly improved test writing efficiency and code coverage, speeding up the development process substantially. Alongside GitLab Duo, numerous iterations of valuable tests were introduced into the gitlab-helper module with human oversight.\n\nRead the next installment in this series where we share [what we learned while using GitLab Duo for generating automated tests](https://about.gitlab.com/blog/automating-with-gitlab-duo-part-2-complex-testing/) and working interactively with AI for more complex situations.\n\n## References\n\nThere’s more than one way to use GitLab Duo to generate tests, check out the other options below:\n\n* The GitLab UI  \n* [The GitLab Web IDE (VS Code in the cloud)](https://docs.gitlab.com/ee/user/project/web_ide/index.html)  \n* VS Code, with the [GitLab Workflow extension for VS Code](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow)  \n* JetBrains IDEs, with the [GitLab Duo Plugin for JetBrains](https://plugins.jetbrains.com/plugin/22325-gitlab-duo)  \n* Visual Studio for Windows, with the [GitLab Extension for Visual Studio](https://marketplace.visualstudio.com/items?itemName=GitLab.GitLabExtensionForVisualStudio)\n",[803,719,972,9,695],"testing",{"slug":974,"featured":6,"template":698},"automating-with-gitlab-duo-part-1-generating-tests","content:en-us:blog:automating-with-gitlab-duo-part-1-generating-tests.yml","Automating With Gitlab Duo Part 1 Generating 
Tests","en-us/blog/automating-with-gitlab-duo-part-1-generating-tests.yml","en-us/blog/automating-with-gitlab-duo-part-1-generating-tests",{"_path":980,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":981,"content":987,"config":992,"_id":994,"_type":14,"title":995,"_source":16,"_file":996,"_stem":997,"_extension":19},"/en-us/blog/automating-with-gitlab-duo-part-2-complex-testing",{"title":982,"description":983,"ogTitle":982,"ogDescription":983,"noIndex":6,"ogImage":984,"ogUrl":985,"ogSiteName":685,"ogType":686,"canonicalUrls":985,"schema":986},"Automating with GitLab Duo, Part 2: Complex testing","Find out how the GitLab team addressed more complex testing situations using GitLab Duo's AI capabilities, including ensuring that code testing followed standards.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099243/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%284%29_3LZkiDjHLjhqEkvOvBsVKp_1750099243011.png","https://about.gitlab.com/blog/automating-with-gitlab-duo-part-2-complex-testing","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Automating with GitLab Duo, Part 2: Complex testing\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Byron Boots\"}],\n        \"datePublished\": \"2024-12-10\",\n      }",{"title":982,"description":983,"authors":988,"heroImage":984,"date":989,"body":990,"category":801,"tags":991},[968],"2024-12-10","The first part of our three-part series on [test generation with GitLab Duo](https://about.gitlab.com/blog/automating-with-gitlab-duo-part-1-generating-tests/) focused on how to automate code testing. Now, we will share the lessons we learned while using AI for test generation.\n\n## Situations we encountered and how we handled them\n\nOverall, we were pleased with the results using [GitLab Duo](https://about.gitlab.com/gitlab-duo/) to generate tests on our code. 
As is the case with any language generation, some cases required minor adjustments such as fixing import paths or editing contents in datasets. For the more complex cases, we had to remember that AI solutions often lack context. Here's how we handled the more complex testing situations with GitLab Duo.\n\n### Updating existing test cases\n\nAs is often the case when developing a software product, we encountered instances that required updates to existing tests. Rather than manually making adjustments to a full test suite for a common issue, we took full advantage of the GitLab Duo Chat window in VS Code. For example, to refactor tests, we used the Chat prompt “Please update the provided tests to use unittest rather than pytest” followed by pasting in the tests we wanted GitLab Duo to update.\n\n![Automated test generation](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099252/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750099252303.png)\n\n\u003Cbr>\u003C/br>\n\n![Chat prompt requesting use of unittest rather than pytest](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099252/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750099252304.png)\n\n**Note:** We copy-and-pasted GitLab Duo's recommendations into our code.\n\n### Creating tests for legacy code\n\nCreating tests for legacy code we knew worked was another challenging situation we encountered. In such circumstances, it was valuable to provide error snippets alongside failing tests and ask GitLab Duo to provide new tests. A full copy-and-paste from the terminal window of noted failures and errors to Chat, along with a request to “Please explain and fix this failing test” or similar prompts, yielded a summary of the issues the test was encountering as well as a new test addressing the problem. We did find this sometimes required multiple rounds of refactoring as new test failures were identified. 
However, the efficiency of GitLab Duo to provide various refactored solutions was fast and a net positive on team and developer efficiency.\n\n### Working with complex or abstracted code\n\nIn other instances, the modularization or complexity of our code led to variance in GitLab Duo’s results. For instance, when generating tests, GitLab Duo sometimes generated a series of passing and failing tests caused by differences in testing approach (e.g. usage of Mock and which objects were mocked). We provided GitLab Duo its own example of a passing test and asked it to modify individual tests one at a time to match the style of the passing tests to maintain consistency. We also would provide GitLab Duo a file of functioning tests for a similar object or task so it could mirror the structure.\n\n### Ensuring generated code follows our standards\n\nWhile developing a Python module, GitLab Duo generated many tests using Mock and often they required refactoring, particularly around naming standardization. In such cases, we could leverage GitLab Duo Chat to refactor tests with instructions as to which specific test components to update. Prompting GitLab Duo for these changes was immensely faster than refactoring tests individually, as we had previously done.\n\n### Addressing uncovered test cases\n\nGitLab Duo generated tests for additional test cases the team had not previously considered, thus increasing coverage. 
Luckily, we could use GitLab Duo to quickly and efficiently address these edge cases and expand testing coverage, which is a key value-add for our team to build quickly and ensure a robust product.\n\n## What we learned\n\nHere are a few key lessons that have been important to our success with GitLab Duo:\n\n* **Fast and efficient for rapid development and iteration -** GitLab Duo’s role in generating automated tests has been a key accelerator in development for our team and allowed us to work faster and with greater confidence in our changes.\n* **Important to use appropriate prompts -** When using GitLab Duo for our use case, we touched on a key topic for machine learning optimization: prompt engineering. Sometimes we needed to modify our question by just a few keywords to lead to the ideal generated answer. \n* **Need understanding of underlying frameworks and code -** When it comes to any AI-generated code that makes it into a product, even if only as testing, it’s critical that we understand how the code functions so we can adequately debug as well as request informed changes.\n* **Need understanding of desired end state and standards -** Similar to following coding standards for formatting and library usage while developing without AI, it’s important to maintain the vision of what the intended outcomes look like and what standards are being adhered to when using AI. GitLab Duo needs the context to understand code standards, so it’s critical for team members using GitLab Duo to provide adequate oversight of its outputs to ensure quality and other expectations are met.\n* **GitLab Duo is not a replacement for all tests -** While we use GitLab Duo significantly for generating automated tests, it does not replace our other tests and human oversight. 
Functional tests, integration tests, and more still serve a valuable place in the QA process and overall software development lifecycle.\n\nIn our next article in this series, we’ll cover [a test we ran to validate the impact of GitLab Duo on our team’s automated testing](https://about.gitlab.com/blog/automating-with-gitlab-duo-part-3-validating-testing/) and discuss the impressive results we have achieved thus far.",[803,972,9],{"slug":993,"featured":6,"template":698},"automating-with-gitlab-duo-part-2-complex-testing","content:en-us:blog:automating-with-gitlab-duo-part-2-complex-testing.yml","Automating With Gitlab Duo Part 2 Complex Testing","en-us/blog/automating-with-gitlab-duo-part-2-complex-testing.yml","en-us/blog/automating-with-gitlab-duo-part-2-complex-testing",{"_path":999,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1000,"content":1006,"config":1011,"_id":1013,"_type":14,"title":1014,"_source":16,"_file":1015,"_stem":1016,"_extension":19},"/en-us/blog/automating-with-gitlab-duo-part-3-validating-testing",{"title":1001,"description":1002,"ogTitle":1001,"ogDescription":1002,"noIndex":6,"ogImage":1003,"ogUrl":1004,"ogSiteName":685,"ogType":686,"canonicalUrls":1004,"schema":1005},"Automating with GitLab Duo, Part 3: Validating testing","Discover what test we ran to validate the impact of GitLab Duo on our team’s automated testing – and the results we achieved.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097447/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%284%29_3LZkiDjHLjhqEkvOvBsVKp_1750097447404.png","https://about.gitlab.com/blog/automating-with-gitlab-duo-part-3-validating-testing","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Automating with GitLab Duo, Part 3: Validating testing\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Byron Boots\"}],\n        \"datePublished\": \"2024-12-17\",\n      
}",{"title":1001,"description":1002,"authors":1007,"heroImage":1003,"date":1008,"body":1009,"category":801,"tags":1010},[968],"2024-12-17","In previous entries in this series, we covered [how we used GitLab Duo to generate tests for our code](https://about.gitlab.com/blog/automating-with-gitlab-duo-part-1-generating-tests/) as well as [what we learned while using GitLab Duo for generating automated tests](https://about.gitlab.com/blog/automating-with-gitlab-duo-part-2-complex-testing/). We also shared some of the ways we addressed making changes to GitLab Duo generated tests. This last article in the series will cover a test we ran to validate the impact of GitLab Duo on our team’s automated testing and discuss the impressive results we have achieved thus far.\n\n### Validation testing results\n\nTo validate that our usage of GitLab Duo to generate tests was adding value the way we expected, we challenged ourselves and GitLab Duo to replace and increase test coverage. The team removed all previously written tests to get our test coverage to 0% and then methodically went through the repository and created new test files to store GitLab Duo-generated tests.\n\nFrom this starting point, the team followed the steps outlined in [the first blog](https://about.gitlab.com/blog/automating-with-gitlab-duo-part-1-generating-tests/) to generate tests. Tests and test files were unmodified by humans to provide a stable control group and a `Tests Generated by Duo` comment at the top of them were suffixed by `duo.py` to indicate where the tests came from.\n\nAll iterations of the tests were only done through interactions with GitLab Duo through the `Generate Tests` and GitLab Duo Chat window as outlined in [the second blog in the series](https://about.gitlab.com/blog/automating-with-gitlab-duo-part-2-complex-testing/). As we shared, we requested GitLab Duo to make updates based on encountered errors, test failures, and example code snippets for GitLab Duo to use as added context. 
\n\nAt all times, when testing with GitLab Duo, we were running tests and coverage reports so we could see if our GitLab Duo-generated tests were increasing testing coverage and adding value as we expected. Taking advantage of [GitLab's test coverage visualization](https://docs.gitlab.com/ee/ci/testing/test_coverage_visualization/), we were able to continuously monitor the results of our work.\n\nUltimately, after using GitLab Duo to regenerate tests for code previously covered through our mostly manual testing, we were able to achieve test coverage of 84%. This was a great accomplishment for the team because:\n\n1. It was a significant improvement from prior coverage, which was at 74%.  \n2. It took approximately two days by one engineer to achieve 84%, compared to the approximately four weeks across multiple engineers that the 74% had taken.\n\nSince this experiment, the team has increased coverage even further to 89% with the help of GitLab Duo, while continuing to introduce new features.\n\n![image of achievements](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097456/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097456771.png)\n\nUsing GitLab Duo allowed for increased testing efficiency and coverage, and also allowed developers with lower context around existing code to write valuable tests, quickly. 
This has resulted in increased confidence on the team to develop new features without worrying about introducing errors.\n\n> If you'd like to [try GitLab Duo](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/), sign up for a free trial today!\n",[803,972,9,695],{"slug":1012,"featured":6,"template":698},"automating-with-gitlab-duo-part-3-validating-testing","content:en-us:blog:automating-with-gitlab-duo-part-3-validating-testing.yml","Automating With Gitlab Duo Part 3 Validating Testing","en-us/blog/automating-with-gitlab-duo-part-3-validating-testing.yml","en-us/blog/automating-with-gitlab-duo-part-3-validating-testing",{"_path":1018,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1019,"content":1025,"config":1034,"_id":1036,"_type":14,"title":1037,"_source":16,"_file":1038,"_stem":1039,"_extension":19},"/en-us/blog/beautifying-our-ui-enhancing-gitlabs-deployment-experience",{"title":1020,"description":1021,"ogTitle":1020,"ogDescription":1021,"noIndex":6,"ogImage":1022,"ogUrl":1023,"ogSiteName":685,"ogType":686,"canonicalUrls":1023,"schema":1024},"Beautifying our UI: Enhancing GitLab's deployment experience","Go inside our innovative approach to improving our user interface, including pairing product designers and frontend engineers to make usability improvements across the platform.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097783/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%288%29_5KLUrr4DkY2u0JTMA12FVm_1750097783460.png","https://about.gitlab.com/blog/beautifying-our-ui-enhancing-gitlabs-deployment-experience","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Beautifying our UI: Enhancing GitLab's deployment experience\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Emily Bauman\"}],\n        \"datePublished\": \"2025-03-06\",\n      
}",{"title":1020,"description":1021,"authors":1026,"heroImage":1022,"date":1028,"body":1029,"category":717,"tags":1030},[1027],"Emily Bauman","2025-03-06","At GitLab, we’ve implemented an innovative approach to improving our experience called [Beautifying our UI](https://handbook.gitlab.com/handbook/product/ux/product-design/#beautifying-our-ui). This unique initiative pairs one product designer with a frontend engineer for a milestone or two, and empowers them to make self-directed usability improvements across the platform. Ultimately, this helps build a more polished product experience, as these pairs can quickly address pain points, refine interactions, and deliver thoughtful improvements that make the platform more efficient and enjoyable to use.\n\nIn this iteration, [Anna Vovchenko](https://gitlab.com/anna_vovchenko) and I decided to focus on the continuous deployment ([CD](https://about.gitlab.com/topics/ci-cd/#what-is-continuous-deployment)) area of the product. Here is how we did it and what we learned.\n\n## Trying something new\n\nAs this was our second round going through the process, we wanted to make several small adjustments that in the end helped us deliver even more quality improvements to the product. These process improvements included: \n\n* **Extended timeline:** We decided this time around we wanted to extend the initiative to span two milestones. This gave us the time to tackle more complex problems, but also gave us space for additional planning at the start. \n* **Structured planning:** While it was encouraged in the past to work directly in merge requests, we found it helped to use the initial issue as a place to plan and seek out problems ahead of time. 
Rather than purely focusing on the ad-hoc, we incorporated a planning phase similar to milestone planning, helping the partnership identify and prioritize potential improvements beforehand.\n* **Product manager integration:** As we focused on one area for this round of the project, we also decided to involve the product manager of the team more actively in the process. This ensured alignment on larger changes, reduced surprises when MRs were merged and allowed us to gather valuable feedback throughout the implementation.\n* **Engaging the community:** We expanded our improvement efforts by inviting contributions from community members, accelerating our ability to implement fixes and enhancements across the platform.\n* **Strategic timing:** We chose to run this iteration during a traditionally slower period, allowing teams to focus more deeply on these improvements without competing priorities.\n\nThese refinements maintained the initiative's core strength of direct designer-engineer collaboration, while adding structure that helped our pair work more effectively.\n\n## What were the main improvements?\n\nDuring the two milestones, our pairing implemented several significant improvements that enhance the user experience across the CD space. Here's a look at what we accomplished:\n\n### Enhanced environment list view\n\nOne of the larger changes made during this cycle of \"Beautifying our UI\" was a redesigned Environment List page to make deployment information more accessible. Previously, users had to click through collapsible sections to view crucial deployment details, and viewing important details at a glance was difficult. 
Now, this information is immediately visible, bringing the most important deployment information to the forefront where users need it.\n\n![Beautifying UI - Environments page before](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097793/Blog/Content%20Images/Blog/Content%20Images/Before_Environments_Page_aHR0cHM6_1750097793301.png)\n\n**Before:** The original design relied on collapsible sections, requiring users to click to reveal deployment information. This meant that users couldn't immediately see the status of their deployments, making it harder to quickly assess the state of their environments.\n\n![Beautifying UI - Environments page after](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097793/Blog/Content%20Images/Blog/Content%20Images/After_Environments_Page_aHR0cHM6_1750097793301.png)\n\n**After:** The new design surfaces critical deployment information directly in the list view, including:\n\n* Deployment status with clear visual indicators\n* Who triggered the deployment along with timestamps\n* Commit information and version tags\n* Actions to take on the environment\n* Latest deployment indicators\n\nThis redesign eliminates the need for extra clicks and gives users immediate visibility into their deployment and environment statuses. The new layout maintains a clean interface while presenting more actionable information upfront.\n\n### Improved deploy keys filtering\n\nAnother larger enhancement was made to our deploy keys interface to improve searchability while maintaining performance. 
This change addresses a critical user need for quickly finding specific deploy keys in large repositories, which was broken when pagination was introduced earlier last year.\n\n![Beautifying UI - Deploy key before](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097793/Blog/Content%20Images/Blog/Content%20Images/Deploy_Key_Before_aHR0cHM6_1750097793303.png)\n\n**Before:** The previous interface displayed deploy keys in a paginated list without a dedicated search function. While pagination helped with performance when handling thousands of keys, users had lost the ability to quickly search through their deploy keys using the browser search functionality, forcing them to manually scan through multiple pages.\n\n![Beautifying UI - Deploy key after](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097793/Blog/Content%20Images/Blog/Content%20Images/Deploy_Key_After_aHR0cHM6_1750097793306.png)\n\n**After:** The new design introduces a dedicated search field at the top of the deploy keys list, allowing users to:\n\n* Quickly filter deploy keys by name or SHA\n* Maintain the performance benefits of pagination\n* Find specific keys without browsing through multiple pages\n\nThis improvement strikes the right balance between performance and usability, especially beneficial for teams managing numerous deploy keys across multiple projects.\n\n### Better Kubernetes agent management\n\nWe made significant improvements to the Kubernetes agent experience by simplifying the registration process and providing better visibility into agent status. These enhancements work together to create a smoother onboarding experience for teams getting started.\n\nOur first area of focus was streamlining how users register agents when they have configuration files ready to use. 
Previously, this process had several pain points that we wanted to address.\n\n![Beautifying UI - Agent before](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097793/Blog/Content%20Images/Blog/Content%20Images/Agent_Before_aHR0cHM6_1750097793309.png)\n\n**Before:**\n\n* Only showed connected and previously connected agents\n* Connection status was limited to \"Never connected\" or \"Not connected\"\n* No clear path to register new agents\n\n![Beautifying UI - Agent after](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097793/Blog/Content%20Images/Blog/Content%20Images/Agent_After_aHR0cHM6_1750097793310.png)\n\n**After:**\n\n* Added a new Available configurations tab showing all potential agent configurations\n* Clear \"Register an agent\" call-to-action button for each available configuration\n\nNext, we turned our attention to making the agent registration modal more intuitive. The previous design created some confusion that we wanted to resolve.\n\n![Beautifying UI - Registration before](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097793/Blog/Content%20Images/Blog/Content%20Images/Registration_Before_aHR0cHM6_1750097793311.png)\n\n**Before:**\n\n* Users faced a confusing dual-purpose search box that both found existing agents and created new ones\n* The workflow had too many decision points instead of a clear path forward\n* The process for creating vs. 
selecting an agent wasn't clearly separated\n\n![Beautifying UI - Registration after](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097793/Blog/Content%20Images/Blog/Content%20Images/Registration_After_aHR0cHM6_1750097793312.png)\n\n**After:**\n\n* Separated the interface into two clear options: bootstrap with Flux or create an agent through the UI\n* Streamlined the workflow into a more linear process\n* Made the distinction between creating new agents and selecting existing ones more obvious\n* Added a success message that clearly shows where to create the optional config file\n\nThese improvements make it immediately clear which agents need attention and provide a straightforward path to register new agents. The reorganized interface better supports both new users setting up their first agent and experienced users managing multiple agents.\n\n## Additional usability enhancements\n\nWhile working on major interface improvements, we also addressed several focused usability issues that significantly improve the day-to-day experience:\n\n* **Enhanced Kubernetes pod search:** Added search functionality for Kubernetes pods on the environment page, making it easier to locate specific pods in large deployments. This was showcased in the [GitLab 17.8 release post](https://about.gitlab.com/releases/2025/01/16/gitlab-17-8-released/#search-for-pods-on-the-dashboard-for-kubernetes).\n* **Improved Flux status visibility:** Added a \"stopped\" badge to the dashboard view when Flux sync is stopped, providing immediate visibility into sync status. This was also showcased in the [GitLab 17.8 release post](https://about.gitlab.com/releases/2025/01/16/gitlab-17-8-released/#view-paused-flux-reconciliations-on-the-dashboard-for-kubernetes). 
\n* **Better release information:** Implemented a clear view of deployments related to a release, improving deployment tracking and visibility.\n* **Streamlined environment search:** Fixed an issue where users couldn't effectively search the Environments page, improving navigation in large environment lists.\n* **Enhanced error message display:** Resolved issues with viewing Flux details when long error messages were present, making troubleshooting more straightforward.\n\n## Looking forward\n\nThe success of these improvements demonstrates the value of empowering our teams to make direct, meaningful changes to our experience. Beyond the product enhancements, one of the most valuable outcomes has been the strengthened relationship between our Frontend and Design teams. Working together closely on these improvements has fostered better understanding of each other's perspectives, workflows, and constraints, leading to more effective collaboration.\n\nThis deepened partnership has created a foundation for even better collaboration in our regular workflow, as team members now have stronger working relationships and shared understanding of each other's domains. 
We're excited to continue this initiative in future iterations, not just for the product improvements it generates, but also for its role in building stronger, more cohesive teams.\n\n> [Follow along with the \"Beautifying our UI\" project](https://handbook.gitlab.com/handbook/product/ux/product-design/#beautifying-our-ui) as we continue to make improvements to GitLab.\n\n## Read more\n\n- [How we overhauled GitLab navigation](https://about.gitlab.com/blog/navigation-research-blog-post/)\n- [GitLab dark mode is getting a new look](https://about.gitlab.com/blog/gitlab-dark-mode-is-getting-a-new-look/)\n- [Beautifying our UI: Giving GitLab build features a fresh look](https://about.gitlab.com/blog/beautifying-of-our-ui/)",[1031,1032,1033,717,9],"design","UX","UI",{"slug":1035,"featured":6,"template":698},"beautifying-our-ui-enhancing-gitlabs-deployment-experience","content:en-us:blog:beautifying-our-ui-enhancing-gitlabs-deployment-experience.yml","Beautifying Our Ui Enhancing Gitlabs Deployment Experience","en-us/blog/beautifying-our-ui-enhancing-gitlabs-deployment-experience.yml","en-us/blog/beautifying-our-ui-enhancing-gitlabs-deployment-experience",{"_path":1041,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1042,"content":1048,"config":1055,"_id":1057,"_type":14,"title":1058,"_source":16,"_file":1059,"_stem":1060,"_extension":19},"/en-us/blog/best-practices-to-set-up-organizational-hierarchies-that-scale",{"title":1043,"description":1044,"ogTitle":1043,"ogDescription":1044,"noIndex":6,"ogImage":1045,"ogUrl":1046,"ogSiteName":685,"ogType":686,"canonicalUrls":1046,"schema":1047},"Best practices to set up organizational hierarchies that scale","Learn how to model organizational hierarchy in GitLab. 
Create structures with clear lines of communication, strategic alignment, and more, while following Agile principles.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098165/Blog/Hero%20Images/Blog/Hero%20Images/agile_agile.png_1750098164666.png","https://about.gitlab.com/blog/best-practices-to-set-up-organizational-hierarchies-that-scale","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Best practices to set up organizational hierarchies that scale\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Amanda Rueda\"}],\n        \"datePublished\": \"2024-07-22\",\n      }",{"title":1043,"description":1044,"authors":1049,"heroImage":1045,"date":1051,"body":1052,"category":1053,"tags":1054},[1050],"Amanda Rueda","2024-07-22","Maximizing the benefits of your GitLab subscription begins with an effective organizational setup. Here’s a straightforward guide to configuring your group, subgroup, and project structure to enhance your GitLab experience.\n\n## Understanding the structure: Groups, subgroups, and projects\n\nGroups and projects allow you to model your organizational hierarchy, enabling advanced permissions management and “team of teams” planning. Use groups and subgroups for strategic planning and configuration management that cascades into subgroups and projects lower in the hierarchy.\n\nBeyond this, you can also model your value streams, enhancing project management and collaboration across your organization.\n\n- **Project level (team level)**\n    - Nested within groups or subgroups, projects are where your actual work happens. This is where repositories live, and settings specific to the project are managed. 
Zoom into day-to-day activities and detailed project tracking at this level.\n    - Effective project configuration helps maintain clean, organized data, which is essential for accurate reporting and analysis.\n\n- **Subgroup level (team of teams)**\n    - Subgroups provide granular permissions management and can be tailored to specific team or project needs, ensuring consistent workflows across your organization.\n    - Subgroups function as clusters of related projects, similar to how a \"team of teams\" operates in Agile.\n    - This level is ideal for managing several teams working towards a common product or service. It facilitates cross-project visibility and integration, which supports synchronization between teams to align on interdependencies and shared objectives.\n\n- **Group level (team of team of teams)**\n    - Think of groups as your organizational pillars within GitLab where broad permissions and access are managed.\n    - At the highest level, groups encompass multiple subgroups and represent the strategic tier of project management, akin to the \"team of team of teams\" in Agile.\n    - This level sets the overarching goals and strategies, defining settings and allocating resources across projects and subgroups to ensure alignment with the company's broad business objectives.\n\nBy structuring your organization with GitLab, you parallel your chosen Agile methodology, which can help you apply Agile principles more naturally across your projects. 
This structure promotes clear lines of communication, efficient resource management, and strategic alignment, all while maintaining the flexibility and responsiveness inherent to Agile methodologies.\n\n> Keep up with news and insights about [GitLab Agile planning](https://about.gitlab.com/blog/categories/agile-planning/).\n\n## Leveraging the GitLab inheritance model\n\nOne of GitLab's powerful features is its [inheritance model](https://docs.gitlab.com/ee/tutorials/scrum_events/index.html#understanding-the-inheritance-model-in-gitlab), which allows settings, permissions, and configurations made at higher levels to automatically apply to lower levels within the hierarchy. Conversely, data at lower levels is inherently available at higher levels in the structure. With the inheritance model, you gain visibility across your entire portfolio from within higher-level groups while providing distinct locations lower in the hierarchy for individual teams to manage their work.\n\nExamples:\n- **Create milestones and labels in your higher-level groups** to cascade down to all subgroups and projects promoting consistency and adherence to organizational standards.\n- **Issues and epics** in lower level projects and subgroups roll up your value stream hierarchy for ease of reference by program management and the executive layer.\n- **Manage user permissions at the group level or top-level subgroup** to optimize permissions and access control. 
This can simplify access control management and ensure that the right people have the right access across multiple projects without the need for repeated configuration.\n\nThese tips not only streamline administrative overhead but also reinforce security and compliance by ensuring that changes at the higher level consistently propagate downwards.\n\n![Organizational hierarchy diagram](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098179/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750098179305.png)\n\n## Best practices for GitLab setup\n\nWhen setting up your GitLab organizational hierarchy, we recommend the following options depending on your organization's needs. Self-managed customers have the option to omit the \"Company Name\" root group layer, as this extra level of organization is not necessary for self-managed deployments. This flexibility ensures that your GitLab setup is tailored to your specific organizational structure and deployment preferences.\n\n### Option 1: Permissions and access are granted at the organizational subgroup level\n\nThis option is ideal for complex permission structures or large organizations needing efficient project sharing across numerous users.\n\n#### Example structure\n\n- Organizational Group\n    - Handles broad permissions typically through integrations with corporate provisioning systems.\n    - Users are added to subgroups, which will serve as the foundation for sharing the entire group with another [group](https://docs.gitlab.com/ee/user/group/manage.html#share-a-group-with-another-group) or a [project](https://docs.gitlab.com/ee/user/project/members/share_project_with_groups.html) to minimize the overhead of direct user management.\n    - When creating user groups, you can utilize [group mentions](https://docs.gitlab.com/ee/user/discussions/index.html#mentions) throughout GitLab to mention large groups of users at a time.\n\n- Development Group\n    - Provides executive-level and 
program-management-level visibility across all development projects at the highest development group level.\n    - Features are created at the subgroup level for access across multiple repos.\n    - Projects are created to hold development repos; this is the level for Team visibility.\n\n![organizational chart for subgroup level](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098179/Blog/Content%20Images/Blog/Content%20Images/Image_1_aHR0cHM6_1750098179306.png)\n\n### Option 2: Permissions and access are granted at any level\nThis option is best for smaller organizations with less complex access requirements. Users are added individually to the divisional groups, subgroups, or projects as access is required. This provides direct control over project management and operational visibility.\n\n#### Highlights\n- Users can be added to a group at the top of the hierarchy or to the lower-level subgroup/project depending on the granularity of access needs. Each member would need to be individually added rather than a single task of sharing a group.\n- Executive-level and program-management-level visibility across all development projects at the highest development group level.\n- Features are created at the subgroup level for access across multiple repos.\n- Projects are created to hold development repos; this is the level for Team visibility.\n\n![Permissions granted at any level](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098179/Blog/Content%20Images/Blog/Content%20Images/Image_2_aHR0cHM6_1750098179307.png)\n\n### Additional configuration considerations\n\n- Milestones and iterations\n    - Create group-level milestones for broad visibility or when milestones need to be shared across groups.\n    - Create milestones at the project level when the milestone is specific to a single project.\n    - For teams working across different groups, setting iterations at the parent group level is beneficial for unified tracking.\n\n- Data 
management\n    - Leverage GitLab's roadmaps, boards, and listing pages to pull data that reflects your organizational setup. This helps you visualize progress and plan effectively across different levels of your structure.\n    - GitLab makes data available in higher-level groups even when the data is created in lower levels.\n    - Create your views at higher levels when you want to view data across groups and projects, and at lower levels when you want to hone in on a specific group or project’s data.\n\n- Template creation\n    - Create higher-level templates to ensure they cascade to all subsequent subgroups and projects, mixing general guidelines with project-specific requirements.\n    - Templates are created within their own repository within the applicable group ([related documentation](https://docs.gitlab.com/ee/user/project/description_templates.html)).\n\n- Labels\n    - Create higher-level labels to ensure they cascade to all subsequent subgroups and projects, mixing org labels with project-specific labels.\n    - Use scoped labels to define organizational structures like teams and workflow status.\n\n![Issue board with labels](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098179/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750098179310.png)\n\n## Leveraging GitLab’s features for optimal performance\n\nImplementing the right structure in GitLab not only streamlines the management of your software projects but also enhances the visibility across different levels of your organization, ensuring that everyone from the top management to individual contributors has the information they need to make informed decisions.\n\n> Get started modeling organizational hierarchy with [a free trial of GitLab 
Ultimate](https://gitlab.com/-/trial_registrations/new?glm_source=about.gitlab.com/blog&glm_content=default-saas-trial).\n","agile-planning",[908,695,9],{"slug":1056,"featured":6,"template":698},"best-practices-to-set-up-organizational-hierarchies-that-scale","content:en-us:blog:best-practices-to-set-up-organizational-hierarchies-that-scale.yml","Best Practices To Set Up Organizational Hierarchies That Scale","en-us/blog/best-practices-to-set-up-organizational-hierarchies-that-scale.yml","en-us/blog/best-practices-to-set-up-organizational-hierarchies-that-scale",{"_path":1062,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1063,"content":1068,"config":1076,"_id":1078,"_type":14,"title":1079,"_source":16,"_file":1080,"_stem":1081,"_extension":19},"/en-us/blog/betstudios-cto-on-improving-ci-cd-capabilities-with-gitlab-premium",{"title":1064,"description":1065,"ogTitle":1064,"ogDescription":1065,"noIndex":6,"ogImage":772,"ogUrl":1066,"ogSiteName":685,"ogType":686,"canonicalUrls":1066,"schema":1067},"Betstudios CTO on improving CI/CD capabilities with GitLab Premium","Read why Betstudios upgraded to GitLab Premium and how their software development process has changed in this Q&A with their CTO Rafael Campuzano.","https://about.gitlab.com/blog/betstudios-cto-on-improving-ci-cd-capabilities-with-gitlab-premium","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Betstudios CTO on improving CI/CD capabilities with GitLab Premium\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Kristina Weis\"}],\n        \"datePublished\": \"2023-12-06\",\n      }",{"title":1064,"description":1065,"authors":1069,"heroImage":772,"date":1071,"body":1072,"category":1073,"tags":1074},[1070],"Kristina Weis","2023-12-06","Since joining [Betstudios](https://betstudios.com/en/) (soon to be part of WA. 
Technology Group) as their CTO earlier this year, Rafael Campuzano has been focused on finding ways to help the software development team save time, automate manual processes, and have a better experience.\n\nThe team had been using GitLab’s free tier for source code management and version control for several years, but they wanted to improve their CI/CD capabilities so they made the decision to upgrade to GitLab Premium. We talked with Rafael about what the team has already been able to do, how their software development process has changed, and what they’re planning to do next.\n\n__What led you to upgrade from GitLab’s free tier to GitLab Premium?__\n\nThe main motivation when I joined Betstudios was to have the service hosted outside our office servers and remove the hassle of having to manage it ourselves, so we decided to move to the SaaS model. Once that decision was made, we needed a certain level of reliability but mainly we wanted to improve our [CI/CD capabilities](https://about.gitlab.com/topics/ci-cd/), which was the reason for upgrading to the Premium plan.\n\n__What made you choose GitLab?__\n\nBetstudios was already using GitLab and I had experience from previous companies and knew that GitLab is a quite complete tool for managing code and beyond. 
Besides, I also liked the openness and the overall culture of the company, based on transparency and innovation.\n\n__How has your software development process changed since adopting GitLab Premium?__\n\nWe’re a small team and haven’t been able to take full advantage of all the power that GitLab Premium brings yet, but we’ve started with some deployment automations and that’s taken away a lot of unnecessary work — and mistakes — from the teams.\n\n__What benefits have you seen since you started using GitLab Premium?__\n\nWe’re just getting started, but we have already saved around five hours per week per Team Lead by automating much of the deployment process, and soon we expect to take that to 10 hours per week of time saved. Now they can dedicate this time to better planning, code reviews, and even coding.\n\n__You also moved from GitLab's free tier to GitLab Premium at your last company. How did that experience affect your decision to upgrade to GitLab Premium at Betstudios?__\n\nI was responsible for infrastructure at [EveryMatrix](https://about.gitlab.com/customers/everymatrix/), which included the support of the GitLab servers on-prem. We needed HA (high availability), so we decided to move to the Premium plan. However, we soon realized we made many development teams happy, because they wanted to use the more advanced features that the Premium plan was coming with. The development teams showed it was a great decision for them, and our relationship with GitLab the company was always great, so when I arrived at Betstudios it was a clear move to make.\n\n__What would you like to do next with GitLab’s DevSecOps Platform?__\n\nI would like to explore the CI/CD capabilities with Kubernetes, and I’m encouraging our Engineering teams to use all the capabilities like code reviews, advanced merge requests, and CI/CD more and more. 
I’ve also seen that many companies are using ArgoCD in combination with GitLab and I’m going to explore if there is a way to do everything just using GitLab.\n\n__Do you have any advice for teams getting started with GitLab?__\n\nIt is an all-in-one platform, so you do not need to build complex systems for different tasks around your code production, methodologies, and operations. GitLab is easy to use and has a great team that will help you to get the most out of it.\n\n> Read more GitLab customer stories on our [customers page](https://about.gitlab.com/customers/).","customer-stories",[951,9,109,1075],"customers",{"slug":1077,"featured":6,"template":698},"betstudios-cto-on-improving-ci-cd-capabilities-with-gitlab-premium","content:en-us:blog:betstudios-cto-on-improving-ci-cd-capabilities-with-gitlab-premium.yml","Betstudios Cto On Improving Ci Cd Capabilities With Gitlab Premium","en-us/blog/betstudios-cto-on-improving-ci-cd-capabilities-with-gitlab-premium.yml","en-us/blog/betstudios-cto-on-improving-ci-cd-capabilities-with-gitlab-premium",{"_path":1083,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1084,"content":1090,"config":1096,"_id":1098,"_type":14,"title":1099,"_source":16,"_file":1100,"_stem":1101,"_extension":19},"/en-us/blog/build-a-new-website-in-a-few-easy-steps-with-gitlab-pages",{"title":1085,"description":1086,"ogTitle":1085,"ogDescription":1086,"noIndex":6,"ogImage":1087,"ogUrl":1088,"ogSiteName":685,"ogType":686,"canonicalUrls":1088,"schema":1089},"Build a new website in a few easy steps with GitLab Pages ","This tutorial shows you how to create and host your personal website using GitLab Pages with a ready-to-use template that you can customize in minutes.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097716/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%281%29_7c3TDgNgct9xQbmTJSw0de_1750097716096.png","https://about.gitlab.com/blog/build-a-new-website-in-a-few-easy-steps-with-gitlab-pages","\n     
                   {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Build a new website in a few easy steps with GitLab Pages \",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Alex Fracazo\"}],\n        \"datePublished\": \"2025-03-03\",\n      }",{"title":1085,"description":1086,"authors":1091,"heroImage":1087,"date":1093,"body":1094,"category":717,"tags":1095},[1092],"Alex Fracazo","2025-03-03","A personal website is more than just a utility for digital creators and professionals in tech. It's a representation of your brand. But creating one from scratch can be time-consuming and expensive.\n\nWith [GitLab Pages](https://docs.gitlab.com/user/project/pages/), you can host your website with built-in features, including SSL certificates and a GitLab-provided domain. All of this is available on GitLab's free tier, making it an efficient solution for hosting your professional presence.\n\nWe're going to take you on a fun journey to craft a stunning personal website using GitLab Pages! We’ve got a super simple, versatile template that you can easily jazz up to reflect your unique style. So grab your favorite snack, get comfy, and let’s turn your online presence into something truly fabulous!\n\n## Prerequisites\n\nYou will need the following prerequisites before getting started:\n\n* A GitLab account (the [free tier](https://about.gitlab.com/pricing/) is sufficient)  \n* Basic familiarity with HTML/CSS  \n* Content and images you want to add to your website (optional)\n\nOnce you’re set up with a GitLab account and have your content handy, you can move on to the next steps.\n\n## Step 1: Create a new project\n\n1. Sign on to your GitLab account and create a project.\n\n![GitLab Pages tutorial - welcome screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097724/Blog/Content%20Images/Blog/Content%20Images/Capture-2025-02-27-183716_aHR0cHM6_1750097724662.png)\n\n2. 
Click **Create blank project**.\n\n![GitLab Pages tutorial - Create new project screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097725/Blog/Content%20Images/Blog/Content%20Images/Capture-2025-02-27-183814_aHR0cHM6_1750097724663.png)\n\n3. Fill in your project details:\n    * Name your project `yourusername.gitlab.io`. Replace `yourusername` with your GitLab username. **Tip:** The project name determines your website’s URL. If you name your project `yourusername.gitlab.io`, your website will be available at `https://yourusername.gitlab.io` with no additional path. However, if you use any other project name, your site will be available at `https://yourusername.gitlab.io/project-name`.\n    * Make the project public.\n4. Click **Create project**.\n\n![GitLab Pages tutorial - Create blank project screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097725/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750097724666.png)\n\n![GitLab Pages tutorial - customized get started page](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097725/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097724668.png)\n\n## Step 2: Add the template files\n\nStart by creating two new files in your repository:\n\n![GitLab Pages tutorial - Add new files to personal page](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097725/Blog/Content%20Images/Blog/Content%20Images/image13_aHR0cHM6_1750097724669.png)\n\n1. First, create `index.html`:\n    * In your project, click the **+** button and select **New file**.\n    * Name the file `index.html`.\n![GitLab Pages tutorial - new file page](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097725/Blog/Content%20Images/Blog/Content%20Images/image14_aHR0cHM6_1750097724671.png)\n    * Add your HTML content.\n        * Use the example HTML provided below. 
(Pro tip: Users can ask GitLab Duo Chat to generate HTML for enhanced functionality.)\n\n```    \n\u003C!DOCTYPE html>\n\u003Chtml>\n\u003Chead>\n    \u003Cmeta charset=\"utf-8\"/>\n    \u003Ctitle>[Your Name] - [Your Title]\u003C/title>\n    \u003Cmeta name=\"description\" content=\"[Your Name] is a [Your Title].\"/>\n    \u003Cmeta name=\"author\" content=\"[Your Name]\"/>\n    \u003Cmeta property=\"og:title\" content=\"[Your Name]\" />\n    \u003Cmeta property=\"og:description\" content=\"[Your Title]\" />\n    \u003Cmeta property=\"og:image\" content=\"og.png\" />\n    \u003Cmeta name=\"viewport\" content=\"width=device-width,initial-scale=1\"/>\n    \u003Clink href=\"https://unpkg.com/basscss@8.0.2/css/basscss.min.css\" rel=\"stylesheet\">\n    \u003Clink href=\"style.css\" rel=\"stylesheet\">\n    \u003Clink rel=\"shortcut icon\" type=\"image/png\" href=\"favicon.png\"/>\n\u003C/head>\n\u003Cbody>\n\u003Cdiv class=\"content\" id=\"content\">\n  \u003Cdiv class=\"p2 sm-p4 mt2 sm-mt4 mb2 sm-mb4\">  \n  \u003Cdiv class=\"fade mt3\">\n    \u003Ca target=\"_new\" href=\"[Your Linkedin URL]\">\n      \u003Cimg class=\"photo\" src=\"profile.png\" width=\"64\" height=\"64\">\n    \u003C/a>\n  \u003C/div>\n  \u003Ch2 class=\"mb0 mt4 fade\">\n    Hello, I'm [Your Name] \n    \u003Cspan class=\"smallcaps\">(\u003C/span>\n    \u003Ca target=\"_new\" href=\"[Your Linkedin URL]\">@[Your Handle]\u003C/a>\n    \u003Cspan class=\"smallcaps\">)\u003C/span>\n  \u003C/h2>\n  \u003Ch2 class=\"mt0 mb4 fade gray\">\n    I'm a [Your Title]\n  \u003C/h2>\n  \u003Cp class=\"mb4 fade\">\n    I'm a [Your Role] at [Your Company], [Brief company description].\n  \u003C/p>\n  \u003Cdiv class=\"fade\">\n    \u003Cp class=\"fade mb4\">\n      Your personal statement about what you do and what you're interested in. 
Add your contact preferences here.\n    \u003C/p>\n  \u003C/div>\n  \u003Cp class=\"fade mb4\">\n    \u003Cspan class=\"gray\">—\u003C/span> \n    [Your Name] \n    \u003Cspan class=\"smallcaps\">(\u003C/span>\n    \u003Ca target=\"_new\" href=\"[Your Linkedin URL]\">@[Your Handle]\u003C/a>\n    \u003Cspan class=\"smallcaps\">)\u003C/span>\n  \u003C/p>\n  \u003C/div>\n\u003C/div>\n\u003C/body>\n\u003C/html> \n```\n\n* Add a commit message (e.g., \"Added index.html\").\n  * Click **Commit changes**.\n\n2. Create `style.css` (follow same steps above).\n\n```\nbody {\n  margin: 0;\n  padding: 0;\n  background: #000;\n  color: #f4f4f4;\n  font-family: \"Graphik Web\", system-ui, -apple-system, BlinkMacSystemFont, \"Helvetica Neue\", \"Helvetica\", \"Segoe UI\", Roboto, Ubuntu, sans-serif;\n  font-weight: 400;\n  font-smooth: antialiased;\n  -webkit-font-smoothing: antialiased;\n  -moz-osx-font-smoothing: grayscale;\n}\n\na {\n  color: #ff310a;\n  text-decoration: none;\n}\n\na:hover {\n  color: #CFEF54\n}\n\n.content {\n  max-width: 40rem;\n  margin: 0 auto;\n}\n\nimg.photo {\n  border-radius: 50%;\n}\n\np {\n  font-size: 1.5rem;\n  line-height: 1.4;\n  margin: 0;\n  letter-spacing: -0.05rem;\n}\n\nh2 {\n  font-weight: 400;\n  line-height: 1.3;\n  letter-spacing: -0.05rem;\n}\n\n.smallcaps {\n  font-variant: small-caps;\n  color:#333;\n}\n\n.gray{\n  color: #999;\n}\n\n.preloader {\n  display: flex;\n  justify-content: center;\n  align-items: center;\n  height: 100vh;\n  height: -moz-available;\n  height: -webkit-fill-available;\n  height: fill-available;\n  width: 100%;\n  background: #000;\n  position: fixed;\n  top: 0;\n  left: 0;\n  z-index: 9999;\n  transition: opacity 0.3s linear;\n  transform: translate3d(0, 0, 0);\n}\n\nbody.loaded .preloader {\n  opacity: 0;\n}\n\n.fade {\n  animation: fadeIn 1s ease-in-out both;\n}\n\n.fade:nth-child(2) {\n\tanimation-delay: 1s;\n}\n\n.fade:nth-child(3) {\n\tanimation-delay: 2s;\n}\n\n.fade:nth-child(4) {\n\tanimation-delay: 
3s;\n}\n\n.fade:nth-child(5) {\n\tanimation-delay: 4s;\n}\n\n.fade:nth-child(6) {\n\tanimation-delay: 5s;\n}\n\n.fade:nth-child(7) {\n\tanimation-delay: 6s;\n}\n\n.fade:nth-child(8) {\n\tanimation-delay: 7s;\n}\n\n.fade:nth-child(9) {\n\tanimation-delay: 8s;\n}\n\n.fade:nth-child(10) {\n\tanimation-delay: 9s;\n}\n\n.fade:nth-child(11) {\n\tanimation-delay: 10s;\n}\n\n.fade:nth-child(12) {\n\tanimation-delay: 11s;\n}\n\n.fade:nth-child(13) {\n\tanimation-delay: 12s;\n}\n\n@keyframes fadeIn {\n\tfrom {\n\t\topacity: 0;\n\t\ttransform: translate3d(0, 0%, 0);\n\t}\n\tto {\n\t\topacity: 1;\n\t\ttransform: translate3d(0, 0, 0);\n\t}\n} \n\n```\n\n## Step 3: Configure GitLab CI file\n\nThere are two ways to create the GitLab CI configuration file that tells GitLab how to build and deploy your site:\n\n![GitLab Pages tutorial - optimize your workflow with CI/CD pipelines screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097725/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097724672.png)\n\n**Option 1: Use Pipeline Editor (recommended)**\n\n1. Go to your project's **Build > Pipeline Editor**.\n\n![GitLab Pages tutorial - pipeline editor/main branch](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097725/Blog/Content%20Images/Blog/Content%20Images/image12_aHR0cHM6_1750097724673.png)\n\n2. The `.gitlab-ci.yml` file will be automatically created. \n3. Copy and paste the following configuration: \n\n```\npages:\n  stage: deploy\n  script:\n    - mkdir .public\n    - cp -r * .public\n    - mv .public public\n  artifacts:\n    paths:\n      - public\n  only:\n    - main\n```\n\n![GitLab Pages Tutorial - New file in window](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097725/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750097724674.png)\n\n**Option 2: Manual creation**\n\nIf you prefer to create the file manually: \n1. Create a new file named `.gitlab-ci.yml`. \n2. 
Add the following configuration:\n\n```\npages:\n  stage: deploy\n  script:\n    - mkdir .public\n    - cp -r * .public\n    - mv .public public\n  artifacts:\n    paths:\n      - public\n  only:\n    - main\n```\n\nThe key to getting your site running is the GitLab CI configuration file. This file tells GitLab how to build and deploy your site.\n\nLet's break down what each part does:\n\n**The script part**\n\n```\nscript:\n  - mkdir .public\n  - cp -r * .public\n  - mv .public public\n```\n\nThis creates a folder called `public` and copies all your website files into it. GitLab Pages uses this folder to serve your website by default, though you can [customize the publishing folder](https://docs.gitlab.com/user/project/pages/introduction/#customize-the-default-folder) if needed.\n\n**The only part**\n\n```\nonly:\n  - main\n\n```\n\nThis tells GitLab to only update your website when changes are made to the main branch. This helps prevent accidental updates from experimental changes.\n\n## Step 4: Watch the magic happen\n1. Commit all your changes.\n2. Go to **Build > Pipelines** to watch your deployment.\n3. Wait for the pipeline to complete successfully (indicated by a green checkmark).\n\n![GitLab Pages tutorial - pipeline running for new page](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097725/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750097724676.png)\n\n![GitLab Pages tutorial - pipeline passed for new page](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097725/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097724677.png)\n\n## Step 5: Access your website\n\nOnce the pipeline completes successfully, your website will be available at: **https://[yourusername].gitlab.io/** .\n\nYou can find an overview of your deployed website and additional settings in your project's **Deploy > Pages** section. Here you'll find useful information. 
including: \n\n* Your website's access URLs   \n* Domain settings  \n  * By default GitLab enables **Unique domain**. Make sure to disable it if you want to use the GitLab-provided domain. Learn more with the [unique domain documentation](https://docs.gitlab.com/ee/user/project/pages#unique-domains).  \n* HTTPS certificates status   \n* Recent deployments   \n* Additional configuration options\n* Custom domains\n\nThis section is particularly helpful when setting up custom domains or troubleshooting deployment issues.\n\n**Customize your site**\n\n![GitLab Pages tutorial - customize site](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097725/Blog/Content%20Images/Blog/Content%20Images/image8_aHR0cHM6_1750097724678.png)\n\n1. Replace all “Your ...” placeholders in `index.html` with your information.\n\n![GitLab Pages tutorial - upload file to customize page](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097725/Blog/Content%20Images/Blog/Content%20Images/image11_aHR0cHM6_1750097724679.png)\n\n2. Add your images:\n    - profile.png - your profile photo (64x64px)\n    - favicon.png - your site favicon (32x32px)\n    - og.png - OpenGraph image for social media preview (1200x630px)\n\n**See it in action**\n\nIf you're familiar with GitLab, feel free to [fork my repository](https://gitlab.com/fracazo/fracazo.gitlab.io) to get started quickly. \n\nHere is the final result:\n[https://fracazo.gitlab.io/](https://fracazo.gitlab.io/)\n\n**Common issues and solutions**\n- By default, GitLab enables \"Unique domain\" for Pages projects. To use the simpler GitLab-provided domain (like `username.gitlab.io`), go to **Deploy > Pages** and disable the \"Use unique domain\" option. 
While unique domains offer some technical advantages, like better asset path handling, you might prefer the cleaner URL structure for a personal website.\n- If your pipeline fails, check that you're using `main` instead of `master` in your `.gitlab-ci.yml` file.\n- Ensure your group and project is public for GitLab Pages to work.\n- If any jobs fail in your pipeline, you can check the job log for detailed error messages to help with troubleshooting.\n\nWith GitLab Pages and this template, you can have a professional/personal website up and running in minutes. The template is clean, responsive, and easy to customize. As you grow professionally, you can easily update your site directly through GitLab. \n\nYou can automate the deployment process by leveraging GitLab's CI/CD capabilities and focusing on creating great content.\n\nThe best part? All of this is available on GitLab's free tier, making it an excellent option for free hosting of your personal projects, documentation sites, or even small business websites. For more advanced features and configurations, check out our [Pages documentation](https://docs.gitlab.com/ee/user/project/pages/).\n\n## What’s next for GitLab Pages?\nWe're constantly working to make GitLab Pages even better for creators and developers. Here are some exciting improvements coming soon: \n\n### Simplified domain management \nWe have some exciting updates coming to GitLab Pages that will make managing your domains even easier and more fun! You can look forward to a streamlined dashboard that brings all your domain settings together in one friendly space, making everything easily accessible. \n\nYou’ll stay informed with real-time updates on your DNS and SSL certificate statuses, helping you keep your domains secure and running smoothly. \n\n### Custom domain setup\nSetting up custom domains will be a breeze with our easy-to-follow process, guiding you every step of the way. 
Plus, you'll be able to set up your custom domains to automatically redirect visitors from your old website address to your new one – perfect for when you want all your traffic to go to one main website. Learn more about [custom domains](https://docs.gitlab.com/ee/user/project/pages/custom_domains_ssl_tls_certification/index.html#set-up-a-custom-domain).\n\n> Get started with GitLab Pages today with [GitLab's free tier](https://about.gitlab.com/pricing/)! \n\n## Learn more\n- [GitLab Pages features review apps and multiple website deployment](https://about.gitlab.com/blog/gitlab-pages-features-review-apps-and-multiple-website-deployment/)\n- [GitLab Pages: Multiple website deployment documentation](https://docs.gitlab.com/user/project/pages/#parallel-deployments)\n- [GitLab Pages examples](https://gitlab.com/pages)",[719,9],{"slug":1097,"featured":6,"template":698},"build-a-new-website-in-a-few-easy-steps-with-gitlab-pages","content:en-us:blog:build-a-new-website-in-a-few-easy-steps-with-gitlab-pages.yml","Build A New Website In A Few Easy Steps With Gitlab Pages","en-us/blog/build-a-new-website-in-a-few-easy-steps-with-gitlab-pages.yml","en-us/blog/build-a-new-website-in-a-few-easy-steps-with-gitlab-pages",{"_path":1103,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1104,"content":1110,"config":1115,"_id":1117,"_type":14,"title":1118,"_source":16,"_file":1119,"_stem":1120,"_extension":19},"/en-us/blog/build-and-run-containers-in-remote-development-workspaces",{"title":1105,"description":1106,"ogTitle":1105,"ogDescription":1106,"noIndex":6,"ogImage":1107,"ogUrl":1108,"ogSiteName":685,"ogType":686,"canonicalUrls":1108,"schema":1109},"Build and run containers in Remote Development workspaces","Use this easy-to-follow tutorial to create a secure, ephemeral, reproducible development environment in GitLab that can replace your local 
environments.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663857/Blog/Hero%20Images/blog-image-template-1800x945__12_.png","https://about.gitlab.com/blog/build-and-run-containers-in-remote-development-workspaces","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Build and run containers in Remote Development workspaces\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Vishal Tak\"}],\n        \"datePublished\": \"2025-03-03\",\n      }",{"title":1105,"description":1106,"authors":1111,"heroImage":1107,"date":1093,"body":1113,"category":717,"tags":1114},[1112],"Vishal Tak","Development environments often require the ability to build and run\ncontainers as part of their local development. Securely running containers\nwithin containers can be challenging. This article will provide a\nstep-by-step guide to securely build and run containers in a workspace.\n\n\nYou will learn how to:\n\n- [Create a Kubernetes cluster on AWS\nEKS](#create-a-kubernetes-cluster-on-aws-eks)\n\n- [Configure Sysbox](#configure-sysbox)\n\n- [Configure GitLab agent for Kubernetes and GitLab Workspaces\nProxy](#configure-gitlab-agent-for-kubernetes-and-gitlab-workspaces-proxy)\n\n- [Configure sudo access for a workspace with\nSysbox](#configure-sudo-access-for-a-workspace-with-sysbox)\n\n- [Configure Ingress Controller](#configure-ingress-controller)\n\n- [Build containers inside a\nworkspace](#build-containers-inside-a-workspace)\n\n- [Run containers inside a workspace](#run-containers-inside-a-workspace)\n\n- [Get started today](#get-started-today)\n\n\n## Create a Kubernetes cluster on AWS EKS\n\nInstall the [AWS\nCLI](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html)\non your local machine. 
Next, configure a [named\nprofile](https://docs.aws.amazon.com/cli/latest/reference/configure/) and\nexport it to ensure all the following `aws` commands use the set\ncredentials.\n\n\n```\n\naws configure --profile gitlab-workspaces-container-demo\n\nexport AWS_PROFILE=gitlab-workspaces-container-demo\n\n```\n\n\nInstall [eksctl](https://eksctl.io/installation/), a CLI to interact with\nAWS EKS. Let’s now create a Kubernetes 1.31 cluster on AWS EKS with 1 node\nof Ubuntu 22.04 of `c5.2xlarge` instance type. The nodes can autoscale from\n0-20 nodes and each node will have a label `sysbox-install: yes` . This will\nbe explained later in the article.\n\n\n```\n\nexport CLUSTER_NAME=\"gitlab-workspaces-container-demo-eks-sysbox\"\n\n\neksctl create cluster \\\n  --name \"${CLUSTER_NAME}\" \\\n  --version 1.31 \\\n  --node-ami-family=Ubuntu2204 \\\n  --nodes=1 \\\n  --nodes-min=0 \\\n  --nodes-max=20 \\\n  --instance-types=c5.2xlarge \\\n  --node-labels \"sysbox-install=yes\" \\\n  --asg-access \\\n  --external-dns-access \\\n  --full-ecr-access\n```\n\n\nCreate an [IAM\nOIDC](https://docs.aws.amazon.com/eks/latest/userguide/associate-service-account-role.html)\nprovider for your cluster.\n\n\n```\n\neksctl utils associate-iam-oidc-provider --cluster \"${CLUSTER_NAME}\"\n--approve\n\n```\n\n\nCreate IAM role for [EBS\nadd-on](https://docs.aws.amazon.com/eks/latest/userguide/ebs-csi.html) for\nEKS.\n\n\n```\n\neksctl create iamserviceaccount \\\n  --name ebs-csi-controller-sa \\\n  --namespace kube-system \\\n  --cluster \"${CLUSTER_NAME}\" \\\n  --role-name \"AmazonEKS_EBS_CSI_DriverRole_${CLUSTER_NAME}\" \\\n  --role-only \\\n  --attach-policy-arn arn:aws:iam::aws:policy/service-role/AmazonEBSCSIDriverPolicy \\\n  --approve\n```\n\n\nCreate Amazon EBS CSI driver add-on for Amazon EKS cluster.  
\n\n\n```\n\neksctl utils describe-addon-versions --kubernetes-version 1.31 | grep\naws-ebs-csi-driver\n\n\nexport AWS_ACCOUNT_ID=\"UPDATE_ME\"\n\n\neksctl create addon \\\n  --cluster \"${CLUSTER_NAME}\" \\\n  --name aws-ebs-csi-driver \\\n  --version latest \\\n  --service-account-role-arn \"arn:aws:iam::${AWS_ACCOUNT_ID}:role/AmazonEKS_EBS_CSI_DriverRole_${CLUSTER_NAME}\" \\\n  --force\n```\n\n\nInstall [kubectl](https://kubernetes.io/docs/reference/kubectl/), a command\nline tool for communicating with a Kubernetes cluster's control plane, using\nthe Kubernetes API.\n\n\nLet’s get the\n[kubeconfig](https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/)\nof the created cluster.\n\n\n```\n\naws eks update-kubeconfig --name \"${CLUSTER_NAME}\"\n\n```\n\n\n## Configure Sysbox\n\n\n[Sysbox](https://github.com/nestybox/sysbox) is a container runtime that\nimproves container isolation and enables containers to run the same\nworkloads as virtual machines.\n\n\n[Install](https://github.com/nestybox/sysbox#installation) Sysbox on the\nKubernetes cluster using the `sysbox-deploy-k8s daemonset`.\n\n\n```\n\ncurl\nhttps://raw.githubusercontent.com/nestybox/sysbox/refs/tags/v0.6.6/sysbox-k8s-manifests/sysbox-install.yaml\n-o sysbox-install.yaml\n\n```\n\n\nBecause of how Sysbox releases itself, it first created a git tag, which\nruns a pipeline to build assets after which the YAML files for the\n`sysbox-deploy-k8s daemonset` are updated. 
Thus, we need to update the\nDaemonSet's `spec.template.spec.containers[0].image` to\n[registry.nestybox.com/nestybox/sysbox-deploy-k8s:v0.6.6-0](https://github.com/nestybox/sysbox/blob/46ba726e8e894aa22e20465a32d22dfa2863ec12/sysbox-k8s-manifests/sysbox-install.yaml#L66)\n.\n\n\n```\n\nnew_image_value=\"registry.nestybox.com/nestybox/sysbox-deploy-k8s:v0.6.6-0\"\n\ntemp_file=$(mktemp)\n\nsed -E \"s|^([[:space:]]*image:)[[:space:]]*.*|\\1 $new_image_value|\"\n\"sysbox-install.yaml\" > \"$temp_file\"\n\nmv \"$temp_file\" \"sysbox-install.yaml\"\n\n```\n\n\nApply the YAML file to Kubernetes and ensure all the pods of the DaemonSet\nare running.\n\n\n```\n\nkubectl apply -f sysbox-install.yaml\n\nkubectl get pod -A\n\nkubectl -n kube-system get daemonset\n\n```\n\n\nVerify the installation by creating a pod which uses Sysbox container\nruntime.\n\n\n```\n\ncat \u003C\u003CEOF | kubectl apply -f -\n\napiVersion: v1\n\nkind: Pod\n\nmetadata:\n  name: sysbox-verification-pod\n  namespace: default\n  annotations:\n    io.kubernetes.cri-o.userns-mode: \"auto:size=65536\"\nspec:\n  runtimeClassName: sysbox-runc\n  containers:\n  - image: \"hello-world\"\n    imagePullPolicy: Always\n    name: main\n  restartPolicy: Always\nEOF\n\n\nkubectl -n default get pod sysbox-verification-pod\n\nkubectl exec -it sysbox-verification-pod -- echo \"Pod is running\nsuccessfully on a Kubernetes cluster configured with Sysbox.\"\n\nkubectl -n default delete pod sysbox-verification-pod\n\n```\n\n\n## Configure GitLab agent for Kubernetes and GitLab Workspaces Proxy\n\n\nFollow our [documentation\ntutorial](https://docs.gitlab.com/ee/user/workspace/set_up_gitlab_agent_and_proxies.html)\nto set up GitLab agent and GitLab Workspaces Proxy.  
\n\n\n## Configure sudo access for a workspace with Sysbox\n\n\nFollow our\n[documentation](https://docs.gitlab.com/ee/user/workspace/configuration.html#with-sysbox)\nto configure sudo access for a workspace with Sysbox.\n\n\n## Configure Ingress Controller\n\n\nSetup [Ingress NGINX Controller for\nKubernetes](https://github.com/kubernetes/ingress-nginx)\n\n\n```\n\nhelm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx\n--force-update\n\nhelm repo update\n\n\nhelm upgrade --install \\\n  ingress-nginx ingress-nginx/ingress-nginx \\\n  --namespace ingress-nginx \\\n  --create-namespace \\\n  --version 4.11.1 \\\n  --timeout=600s --wait --wait-for-jobs\n\nkubectl -n ingress-nginx get pod\n\n```\n\n\n## Build containers inside a workspace\n\n\nWe’ll use\n[example-go-http-app](https://gitlab.com/gitlab-org/workspaces/examples/example-go-http-app)\nas the project to create a workspace from. Open the workspace, start a\nterminal, and install [Docker](https://docs.docker.com/engine/install/).\n\n\n```\n\n# Add Docker's official GPG key:\n\nsudo apt-get update\n\nsudo apt-get install ca-certificates curl\n\nsudo install -m 0755 -d /etc/apt/keyrings\n\nsudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o\n/etc/apt/keyrings/docker.asc\n\nsudo chmod a+r /etc/apt/keyrings/docker.asc\n\n\n# Add the repository to Apt sources:\n\necho \\\n  \"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \\\n  $(. 
/etc/os-release && echo \"${UBUNTU_CODENAME:-$VERSION_CODENAME}\") stable\" | \\\n  sudo tee /etc/apt/sources.list.d/docker.list > /dev/null\nsudo apt-get update\n\nsudo apt-get install -y docker-ce docker-ce-cli containerd.io\ndocker-buildx-plugin docker-compose-plugin\n\n\n# Start the Docker Daemon\n\nsudo dockerd\n\n```\n\n\nBuild the container image.\n\n\n```\n\nsudo docker build -t workspaces-golang-server .\n\n```\n\n\n## Run containers inside a workspace\n\n\nLet’s run the container built above and expose port 3000 from the container\nonto the host (workspace).\n\n\n```\n\nsudo docker run -p 3000:3000 workspaces-golang-server\n\n```\n\n\nThe port `3000` is exposed in the\n[.devfile.yaml](https://gitlab.com/gitlab-org/workspaces/examples/example-go-http-app/-/blob/dd3dbb38cdce1143f7ed023980f34630cea991a5/.devfile.yaml#L15)\nused to create the workspace. Access the server running inside the container\nfrom the browser. Here is a video clip.\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/JQErF0U6oFk?si=6oiK48q5ghZq312g\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n## Get started today\n\n\nFrom GitLab 17.4, you can build and run containers securely in GitLab\nWorkspaces. See our\n[documentation](https://docs.gitlab.com/ee/user/workspace/configuration.html#build-and-run-containers-in-a-workspace)\nfor more information. Replace your local development environments to GitLab\nWorkspaces for a secure, ephemeral, reproducible development environment. 
\n\n\n## Read more\n\n\n- [Enable secure sudo access for GitLab Remote Development\nworkspaces](https://about.gitlab.com/blog/enable-secure-sudo-access-for-gitlab-remote-development-workspaces/)\n\n- [Quickstart guide for GitLab Remote Development\nworkspaces](https://about.gitlab.com/blog/quick-start-guide-for-gitlab-workspaces/)\n\n- [Create a workspace quickly with the GitLab default\ndevfile](https://about.gitlab.com/blog/create-a-workspace-quickly-with-the-gitlab-default-devfile/)\n\n- [Contributor how-to: Remote Development workspaces and GitLab Developer\nKit](https://about.gitlab.com/blog/gitlab-gdk-remote-development/)\n",[719,9,695,717],{"slug":1116,"featured":91,"template":698},"build-and-run-containers-in-remote-development-workspaces","content:en-us:blog:build-and-run-containers-in-remote-development-workspaces.yml","Build And Run Containers In Remote Development Workspaces","en-us/blog/build-and-run-containers-in-remote-development-workspaces.yml","en-us/blog/build-and-run-containers-in-remote-development-workspaces",{"_path":1122,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1123,"content":1129,"config":1135,"_id":1137,"_type":14,"title":1138,"_source":16,"_file":1139,"_stem":1140,"_extension":19},"/en-us/blog/building-a-more-inclusive-and-welcoming-open-source-community-on-gitlab",{"title":1124,"description":1125,"ogTitle":1124,"ogDescription":1125,"noIndex":6,"ogImage":1126,"ogUrl":1127,"ogSiteName":685,"ogType":686,"canonicalUrls":1127,"schema":1128},"Building a more inclusive and welcoming open source community on GitLab","Open source projects using GitLab can now easily apply for CHAOSS DEI badges.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749683305/Blog/Hero%20Images/AdobeStock_449040869.jpg","https://about.gitlab.com/blog/building-a-more-inclusive-and-welcoming-open-source-community-on-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        
\"headline\": \"Building a more inclusive and welcoming open source community on GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Emilio Salvador\"}],\n        \"datePublished\": \"2024-01-29\",\n      }",{"title":1124,"description":1125,"authors":1130,"heroImage":1126,"date":865,"body":1132,"category":1133,"tags":1134},[1131],"Emilio Salvador","At GitLab, our mission is to create a world where everyone can contribute. To keep building that world, we want to equip open source projects using GitLab with tools to foster more inclusive and welcoming communities.\nToday we're announcing one of those tools.\n\nWe're excited to share that GitLab has partnered with the Community Health Analytics in Open Source Software ([CHAOSS](https://chaoss.community/)) project to integrate GitLab with their [recently released DEI Project Badging program](https://go.gitlab.com/JKwGOR). The DEI Project Badging program enables open source projects to:\n- Clearly signal their focus on building diverse communities\n- More easily highlight the work they are doing to welcome and support new members\n- Add visual badges to their projects to indicate their community's reflection on the CHAOSS DEI badging metrics\n\nGitLab is already [badged](https://go.gitlab.com/QFJutN). Many of our open source partners are joining us (see below). Now your project can be badged, too. Everything you need to start using this new integration is on the [CHAOSS Project Badging](https://go.gitlab.com/qEcu1s) site.\n\nRead on to learn how this initiative came together — and how you can get involved.\n\n## Productive CHAOSS\nFinding an open source community to learn, connect, and grow with isn't always easy. 
In fact, [a Linux Foundation report on diversity, equity, and inclusion](https://www.linuxfoundation.org/research/the-2021-linux-foundation-report-on-diversity-equity-and-inclusion-in-open-source) (DEI) in open source communities highlighted just how difficult it can be for certain contributors.\nOpen source contributors from underrepresented groups were more likely to feel unwelcome and experience exclusionary practices. The report noted barriers to representation in leadership and language blockers around reading and writing in the English language. Here are some findings from the report:\n- 36% of respondents reported experiencing some form of stereotyping behavior based on perceived demographic characteristics. \n- 30% of respondents were unsure a code of conduct would be enforced. \n- 22% of respondents disagreed that equal opportunity exists for people with different backgrounds to be part of the decision-making process.\n\nThese numbers are concerning. But to anyone familiar with the struggles of diversity, equity, and inclusion in open source, they're likely not surprising, either.\n\nTo better align on best practices for building inclusive open source communities, and to ensure that [GitLab's company DEI value](https://handbook.gitlab.com/handbook/company/culture/inclusion/) is reflected in our contributor community, GitLab partnered with an authoritative source: the [CHAOSS DEI working group](https://chaoss.community/diversity-and-inclusion-badging/). Founded in 2017, the [CHAOSS community](https://chaoss.community/) is a Linux Foundation project that defines open source community health metrics.\n\nThe group builds tools that everyone working in open source — maintainers, developers, and community managers alike — can use to spot trends in their open source projects. 
Ultimately, CHAOSS wants to enable everyone to create healthier and more sustainable open source communities.\n\nThe CHAOSS DEI working group asked us to reflect on their open source [inclusivity metrics](https://github.com/badging/ProjectBadging/blob/main/Template.DEI.md) and showcase what we were doing at GitLab to build a more inclusive open source ecosystem. They also invited us to contribute to one of their most ambitious projects: a system of badges that open source communities can use to clearly signal their dedication to making that ecosystem a more supportive place.\n\n### How the DEI Project Badging system works\nIt works like this.\n\nOpen source projects compose DEI Project Statements and place those statements prominently in their codebases (as a file named DEI.md). Writing the statement involves taking a real, hard look at what DEI means for a project's unique community — not just signing off on a shared, generalized statement. By publicly sharing these statements, the entire open source ecosystem learns and grows collectively.\n\nCHAOSS offers an automated tool for scanning a project repository for the presence of the DEI.md file, then returns its contents to CHAOSS for review. If the project meets the CHAOSS project's criteria for diversity, equity, and inclusion, CHAOSS awards that project a badge, signifying its status as an inclusive project.\n\n\"CHAOSS spends a lot of time thinking about open source community health, so we are thrilled to be able to help open source projects better communicate and surface their efforts to build more inclusive communities,\" says Elizabeth Barron, a community manager for CHAOSS. \"We are hopeful that advocating for a more consistent way to do so (via a DEI.md file) will offer a better way for a project to share their approach with other projects, in true open source fashion.\"\n\nWe liked what we saw. 
And we knew we could pitch in — not just by writing and certifying our own [DEI Project Statement](https://go.gitlab.com/QFJutN), but by integrating the CHAOSS project's tool with GitLab so other communities could, too.\n\n## GitLab contributes\nSo we got to work. We examined practices from teams across GitLab, including Developer Relations, Contributor Success, GitLab UX, the Product Accessibility working group, and the Diversity, Inclusion and Belonging team. To help draft an initial Bronze tier for the DEI Badging program, we shared example practices from GitLab, including:\n- GitLab's project maintainers and merge request coaches span global timezones and work asynchronously with contributors across the world.\n- GitLab operates the GitLab Unfiltered YouTube channel that shares all publicly available meetings, recordings, and community pairing sessions.\n- GitLab uses weekly triage reports on first-time contributors who are awaiting a response to a merge request. This ensures newcomers always hear back from a real person about their merge request.\n\nIn partnership with CHAOSS, we'll continue to build more tiers into the program. This will help motivate projects to continue their DEI efforts and reflect on more CHAOSS DEI metrics.\n\nAnd we've made it easier for open source projects on GitLab to get badged, too, by collaborating with CHAOSS to directly integrate GitLab with the badging application process. The CHAOSS badging website features a \"Login with GitLab\" button, which provides project owners a single-click connection between their GitLab projects and CHAOSS. \n\nFor communities using GitLab to build open source software, this makes submitting a project and scanning it for a DEI.md file fast and easy. 
For self-hosted GitLab projects, applicants can submit a form on the CHAOSS badging website to get a review.\n\nTo help sustain this initiative, GitLab is sponsoring the CHAOSS Africa chapter, the team behind development of the DEI Project Badging system. Since its inception in 2022, CHAOSS Africa has seen impressive growth while solving the challenges of open source communities in Africa and helping newcomers become open source contributors.\u2028 We're eager to see what they continue building together with their communities.\n\n## With help from our friends\nWe're not acting alone. Building a more diverse, equitable, and inclusive open source ecosystem requires collective commitment. In that spirit, several of our [open source partners](https://about.gitlab.com/solutions/open-source/partners/) are announcing their support of the badging integration.\n\nHere's what they had to say:\n\n\"The [Drupal Association](https://www.drupal.org/) is proud to be reinforcing our longstanding commitment to diversity, equity, inclusion, and justice by partnering with CHAOSS and GitLab right at the launch of this initiative. Drupal is recognized as a Digital Public Good by the United Nations-endorsed Digital Public Goods Alliance, and we feel the responsibility of building a better, more open internet that recognizes, elevates, and serves historically underrepresented communities. We're hopeful that this effort is part of a sea change in open source communities, and software development in general, to better recognize, evaluate, and redress DEI challenges that we have a collective responsibility to solve. We believe this metric-driven approach will help projects reinforce each other's good behavior, and inspire the industry as a whole. 
We're looking forward to cataloging our DEI commitments according to this new process, to share and compare with the wider ecosystem.\" **— [Tim Lehnen](https://gitlab.com/hestenet-drupal), CTO, Drupal Association**\n\n\"[The Good Docs Project](https://thegooddocsproject.dev/) is excited to join with CHAOSS and GitLab to promote the values of diversity, equity, and inclusion in open source. We want to empower our community members to do their best work and be their authentic selves. By participating in this initiative, we hope to think deeply about how we can promote greater diversity, equity, and inclusion in our project and then develop concrete policies and actions to support those goals. We pledge to develop our policies and earn our DEI badge from CHAOSS within the next few months.\" **— [Alyssa Rock](https://gitlab.com/barbaricyawps), Community Manager, The Good Docs Project**\n\n\"The integration of CHAOSS project's diversity, equity, and inclusion (DEI) initiative with GitLab is an important milestone for building more inclusive open source software, one that resonates on all levels with our [Colmena Project](https://blog.colmena.media/). The initiative creates the necessary visibility for many inclusive open source projects, not only paves the way for an ecosystem-focused approach to software development in general, but also encourages greater cooperation at a peer-to-peer level. It enables community members to recognize the vast diversity of contexts involved in the work of software development, and to inspire each other. This is important to the Colmena project, which is focused on supporting community and local media that makes visible the reality of indigenous peoples, women, youth, and different identities that are not part of the agenda of mainstream media. Participating in this initiative gives us the opportunity to better recognize DEI challenges and constantly reflect on our work to readjust and improve our efforts. 
We commit to continuing the dialogue with our community on these issues, documenting our efforts transparently and making necessary readjustments to policies and procedures.\" **— [Nils Brock](https://gitlab.com/nilsbrock), Program Director, Colmena**\n\n\"The [Kali Linux](https://www.kali.org/) team is very proud to have been invited to take part in this initiative, and we are looking forward to what it means for the open source community. We are committed to being as inclusive as possible and hope to demonstrate that through our efforts. For more information on what we are planning on doing to support it, please read our [DEI Promise](https://www.kali.org/blog/dei-promise/).\" **— [Joe O'Gorman](https://gitlab.com/Gamb1t), Community Manager, Kali Linux**\n\n## Let's build together\nThe work is far from over.\n\n\"We are committed to diversifying open source communities on GitLab. It's a critical part of our strategy for Diversity, Equity, and Inclusion, at GitLab in 2024 and beyond,\" says Sherida McMullan, Vice President of Diversity Inclusion & Belonging at GitLab. \"This DEI Project Badging program launched in partnership with CHAOSS helps us to make great strides in fostering an inclusive open source space and highlighting inclusive projects. As we enter Black History month, this is just the beginning of the impact we are looking to make in GitLab's open source communities.\"\n\nWe invite every member of the GitLab community to join us in making the open source community on GitLab a more diverse, more equitable, and more inclusive place to build the future together. 
Get started today by certifying your project, contributing to GitLab, and helping us create a world where everyone can contribute.","news",[1133,9,951],{"slug":1136,"featured":6,"template":698},"building-a-more-inclusive-and-welcoming-open-source-community-on-gitlab","content:en-us:blog:building-a-more-inclusive-and-welcoming-open-source-community-on-gitlab.yml","Building A More Inclusive And Welcoming Open Source Community On Gitlab","en-us/blog/building-a-more-inclusive-and-welcoming-open-source-community-on-gitlab.yml","en-us/blog/building-a-more-inclusive-and-welcoming-open-source-community-on-gitlab",{"_path":1142,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1143,"content":1149,"config":1156,"_id":1158,"_type":14,"title":1159,"_source":16,"_file":1160,"_stem":1161,"_extension":19},"/en-us/blog/building-a-text-adventure-using-cplusplus-and-code-suggestions",{"title":1144,"description":1145,"ogTitle":1144,"ogDescription":1145,"noIndex":6,"ogImage":1146,"ogUrl":1147,"ogSiteName":685,"ogType":686,"canonicalUrls":1147,"schema":1148},"Explore the Dragon Realm: Building a C++ adventure game with AI","How to use GitLab Duo Code Suggestions to create a text-based adventure game, including magical locations to visit and items to procure, using C++.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663344/Blog/Hero%20Images/compassinfield.jpg","https://about.gitlab.com/blog/building-a-text-adventure-using-cplusplus-and-code-suggestions","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Explore the Dragon Realm: Build a C++ adventure game with a little help from AI\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Fatima Sarah Khalid\"}],\n        \"datePublished\": \"2023-08-24\",\n      }",{"title":1150,"description":1145,"authors":1151,"heroImage":1146,"date":1153,"body":1154,"category":801,"tags":1155},"Explore the Dragon Realm: Build a C++ adventure game 
with a little help from AI",[1152],"Fatima Sarah Khalid","2023-08-24","Learning, for me, has never been about reading a textbook or sitting in on a\nlecture - it's been about experiencing and immersing myself in a hands-on\nchallenge. This is particulary true for new programming languages. With\n[GitLab Duo Code Suggestions](https://about.gitlab.com/gitlab-duo/),\nartificial intelligence (AI) becomes my interactive guide, providing an\nenvironment for trial, error, and growth. In this tutorial, we will build a\ntext-based adventure game in C++ by using Code Suggestions to learn the\nprogramming language along the way.\n\n\nYou can use this table of contents to navigate into each section. It is\nrecommended to read top-down for the best learning experience.\n\n\n- [Setup](#setup)\n  - [Installing VS Code](#installing-vs-code)\n  - [Installing Clang as a compiler](#installing-clang-as-a-compiler)\n  - [Setting up VS Code](#setting-up-vs-code)\n- [Getting started](#getting-started)\n  - [Compiling and running your program](#compiling-and-running-your-program)\n- [Setting the text adventure stage](#setting-the-adventure-stage)\n\n- [Defining the adventure: Variables](#defining-the-adventure-variables)\n\n- [Crafting the adventure: Making decisions with\nconditionals](#crafting-the-adventure-making-decisions-with-conditionals)\n\n- [Structuring the narrative:\nCharacters](#structuring-the-narrative-characters)\n\n- [Structuring the narrative: Items](#structuring-the-narrative-items)\n\n- [Applying what we've learned at the Grand\nLibrary](#applying-what-weve-learned-at-the-grand-library)\n\n- [See you next time in the Dragon\nRealm](#see-you-next-time-in-the-dragon-realm)\n\n- [Share your feedback](#share-your-feedback)\n\n\n> Download [GitLab Ultimate for free](https://about.gitlab.com/gitlab-duo/)\nfor a trial of GitLab Duo Code Suggestions.\n\n\n## Setup\n\nYou can follow this tutorial in your [preferred and 
supported\nIDE](https://docs.gitlab.com/ee/user/project/repository/code_suggestions.html#enable-code-suggestions-in-other-ides-and-editors).\nReview the documentation to enable Code Suggestions for [GitLab.com\nSaaS](https://docs.gitlab.com/ee/user/project/repository/code_suggestions.html#enable-code-suggestions-on-gitlab-saas)\nor [GitLab self-managed\ninstances](https://docs.gitlab.com/ee/user/project/repository/code_suggestions.html#enable-code-suggestions-on-self-managed-gitlab).\n\n\nThese installation instructions are for macOS Ventura on M1 Silicon. \n\n\n### Installing VS Code\n\n\n* Download and install [VS Code](https://code.visualstudio.com/download).\n\n* Alternatively, you can also install it as a Homebrew cask: `brew install\n--cask visual-studio-code`.\n\n\n### Installing Clang as a compiler\n\n\n* On macOS, you'll need to install some developer tools. Open your terminal\nand type:\n\n\n```\n\nxcode-select --install\n\n```\n\n\nThis will prompt you to install Xcode's command line tools, which include\nthe [Clang C++ compiler](https://clang.llvm.org/get_started.html).\n\n\nAfter the installation, you can check if `clang++` is installed by typing:\n\n\n```\n\nclang++ --version\n\n```\n\n\nYou should see an output that includes some information about the Clang\nversion you have installed. \n\n\n### Setting up VS Code\n\n\n* Launch VS Code.\n\n* Install and configure [the GitLab Workflow\nextension](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow).\n\n* Optionally, in VS Code, install the [C/C++ Intellisense\nextension](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools),\nwhich helps with debugging C/C++. \n\n\n## Getting started\n\nNow, let's start building this magical adventure with C++. We'll start with\na \"Hello World\" example.\n\n\nCreate a new project `learn-ai-cpp-adventure`. In the project root, create\n`adventure.cpp`. The first part of every C++ program is the `main()`\nfunction. 
It's the entry point of the program.\n\n\nWhen you start writing `int main() {`, Code Suggestions will help\nautocomplete the function with some default parameters.\n\n\n![adventure.cpp with a hello world implementation suggested by Code\nSuggestions](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/0-helloworld.png){:\n.shadow}\n\n\n```cpp\n\nint main()\n\n{\n    cout \u003C\u003C \"Hello World\" \u003C\u003C endl;\n    return 0;\n}\n\n```\n\n\nWhile this is a good place to start, we need to add an include and update\nthe output statement:\n\n\n```cpp\n\n#include \u003Ciostream> // Include the I/O stream library for input and output\n\n\n// Main function, the starting point of the program\n\nint main()\n\n{\n    // Print \"Hello World!\" to the console\n    std::cout \u003C\u003C \"Hello World!\" \u003C\u003C std::endl;\n\n    // Return 0 to indicate successful execution\n    return 0;\n}\n\n```\n\n\nThe program prints \"Hello World!\" to the console when executed.\n\n\n* `#include \u003Ciostream>`: Because we are building a text-based adventure, we\nwill rely on input from the player using input and output operations (I/O)\nin C++. This include is a preprocessor directive that tells our program to\ninclude the `iostream` library, which provides facilities to use input and\noutput streams, such as `std::cout` for output.\n\n\n* You might find that Code Suggestions suggests `int main(int argc, char*\nargv[])` as the definition of our main function. The parameters `(int argc,\nchar* argv[])` are used to pass command-line arguments to the program. Code\nSuggestions added them as default parameters, but they are not needed if\nyou're not using command-line arguments. In that case, we can also define\nthe main function as `int main()`.\n\n\n* `std::cout \u003C\u003C \"Hello World!\" \u003C\u003C std::endl;`: outputs \"Hello World\" to the\nconsole. 
The stream operator `\u003C\u003C` is used to send the string to output.\n`std::endl` is an end-line character.\n\n\n* `return 0;`: we use `return 0;` to indicate the end of the `main()`\nfunction and return a value of 0. In C++, it is good practice to return 0 to\nindicate the program has completed successfully.\n\n\n### Compiling and running your program\n\nNow that we have some code, let's review how we'll compile and run this\nprogram. \n\n* Open your terminal or use the terminal in VSCode (View -> Terminal).\n\n* Navigate to your project directory.\n\n* Compile your program by typing:\n\n\n```bash\n\nclang++ adventure.cpp -o adventure\n\n```\n\n\nThis command tells the Clang++ compiler to compile adventure.cpp and create\nan executable named adventure. After this, run your program by typing:\n\n\n```\n\n./adventure\n\n```\n\n\nYou should see \"Hello World!\" printed in the terminal. \n\n\nBecause our tutorial uses a single source file `adventure.cpp`, we can use\nthe compiler directly to build our program. In the future, if the program\ngrows beyond a file, we'll set up additional configurations to handle\ncompilation. \n\n\n## Setting the text adventure stage\n\nBefore we get into more code, let's set the stage for our text adventure.\n\n\nFor this text adventure, players will explore the Dragon Realm. The Dragon\nRealm is full of mountains, lakes, and magic. Our player will enter the\nDragon Realm for the first time, explore different locations, meet new\ncharacters, collect magical items, and journal their adventure. At every\nlocation, they will be offered choices to decide the course of their\njourney.\n\n\nTo kick off our adventure into the Dragon Realm, let's update our\n`adventure.cpp main()` function to be more specific. 
As you update the\nwelcome message, you might find that Code Suggestions already knows we're\nbuilding a game.\n\n\n![adventure.cpp - Code Suggestions offers suggestion of welcoming users to\nthe Dragon Realm and knows its a\ngame](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/1-welcome-to-the-realm.png){:\n.shadow}\n\n\n```cpp\n\n#include \u003Ciostream> // Include the I/O stream library for input and output\n\n\n// Main function, the starting point of the program\n\nint main()\n\n{\n    // Print \"Hello World!\" to the console\n    std::cout \u003C\u003C \"Welcome to the Dragon Realm!\" \u003C\u003C std::endl;\n\n    // Return 0 to indicate successful execution\n    return 0;\n}\n\n```\n\n\n## Defining the adventure: Variables\n\nA variable stores data that can be used throughout the program scope in the\n`main()` function. A variable is defined by a type, which indicates the kind\nof data it can hold.\n\n\nLet's create a variable to hold our player's name and give it the type\n`string`. 
A `string` is designed to hold a sequence of characters so it's\nperfect for storing our player's name.\n\n\n```cpp\n\n#include \u003Ciostream> // Include the I/O stream library for input and output\n\n\n// Main function, the starting point of the program\n\nint main()\n\n{\n    // Print \"Hello World!\" to the console\n    std::cout \u003C\u003C \"Welcome to the Dragon Realm!\" \u003C\u003C std::endl;\n\n    // Declare a string variable to hold the player's name\n    std::string playerName;\n\n    // Return 0 to indicate successful execution\n    return 0;\n}\n\n```\n\n\nAs you do this, you may notice that Code Suggestions knows what's coming\nnext - prompting the user for their player's name.\n\n\n![adventure.cpp - Code Suggestions suggests welcoming the player with the\nplayerName\nvariable](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/2-player-name-variable.png){:\n.shadow}\n\n\nWe may be able to get more complete and specific Code Suggestions by\nproviding comments about what we'd like to do with the name - personally\nwelcome the player to the game. Start by adding our plan of action in\ncomments.\n\n\n```cpp\n    // Declare a string variable to hold the player's name\n    std::string playerName;\n\n    // Prompt the user to enter their player name\n\n    // Display a personalized welcome message to the player with their name\n```\n\n\nTo capture the player's name from input, we need to use the `std::cin`\nobject from the `iostream` library to fetch input from the player using the\nextraction operator `>>`. 
If you start typing `std::` to start prompting the\nuser, Code Suggestions will make some suggestions to help you gather user\ninput and save it to our `playerName` variable.\n\n\n![adventure.cpp - Code Suggestions prompts the user to input their player\nname](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/2.1-player-name-input.png){:\n.shadow}\n\n\nNext, to welcome our player personally to the game, we want to use\n`std::cout` and the `playerName` variable together:\n\n\n```cpp\n    // Declare a string variable to store the player name\n    std::string playerName;\n\n    // Prompt the user to enter their player name\n    std::cout \u003C\u003C \"Please enter your name: \";\n    std::cin >> playerName;\n\n    // Display a personalized welcome message to the player with their name\n    std::cout \u003C\u003C \"Welcome \" \u003C\u003C playerName \u003C\u003C \" to The Dragon Realm!\" \u003C\u003C std::endl;\n```\n\n\n## Crafting the adventure: Making decisions with conditionals\n\nIt's time to introduce our player to the different locations in tbe Dragon\nRealm they can visit. To prompt our player with choices, we use\nconditionals. Conditionals allow programs to take different actions based on\ncriteria, such as user input.\n\n\nLet's offer the player a selection of locations to visit and capture their\nchoice as an `int` value that corresponds to the location they picked.\n\n\n```cpp\n\n// Display a personalized welcome message to the player with their name\n\nstd::cout \u003C\u003C \"Welcome \" \u003C\u003C playerName \u003C\u003C \" to The Dragon Realm!\" \u003C\u003C\nstd::endl;\n\n\n// Declare an int variable to capture the user's choice\n\nint choice;\n\n```\n\n\nThen, we want to offer the player the different locations that are possible\nfor that choice. 
Let's start with a comment and prompt Code Suggestions with\n`std::cout` to fill out the details for us.\n\n\n![adventure.cpp - Code Suggestions suggests a multiline output for all the\nlocations listed in the code\nbelow](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3-setup-location-choice.png){:\n.shadow}\n\n\nAs you accept the suggestions, Code Suggestions will help build out the\noutput and ask the player for their input.\n\n\n![adventure.cpp - Code Suggestions suggests a multiline output for all the\nlocations listed in the code below and asks for player\ninput](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.1-capture-player-location-choice.png){:\n.shadow}\n\n\n```cpp\n    // Declare an int variable to capture the user's choice\n    int choice;\n\n    // Offer the player a choice of 3 locations: 1 for Moonlight Markets, 2 for Grand Library, and 3 for Shimmer Lake.\n    std::cout \u003C\u003C \"Where will \" \u003C\u003C playerName \u003C\u003C \" go?\" \u003C\u003C std::endl;\n    std::cout \u003C\u003C \"1. Moonlight Markets\" \u003C\u003C std::endl;\n    std::cout \u003C\u003C \"2. Grand Library\" \u003C\u003C std::endl;\n    std::cout \u003C\u003C \"3. Shimmer Lake\" \u003C\u003C std::endl;\n    std::cout \u003C\u003C \"Please enter your choice: \";\n    std::cin >> choice;\n```\n\n\nOnce you start typing `std::cin >>` or accept the prompt for asking the\nplayer for their choice, Code Suggestions might offer a suggestion for\nbuilding out your conditional flow. 
AI is non-deterministic: One suggestion\ncan involve if/else statements while another solution uses a switch\nstatement.\n\n\nTo give Code Suggestions a nudge, we'll add a comment and start typing out\nan if statement: `if (choice ==)`.\n\n\n![adventure.cpp - Code Suggestions suggests using an if statement to manage\nchoice of\nlocations](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.2-if-statement-locations.png){:\n.shadow}\n\n\nAnd if you keep accepting the subsequent suggestions, Code Suggestions will\nautocomplete the code using if/else statements.\n\n\n![adventure.cpp - Code Suggestions helps the user fill out the rest of the\nif/else statements for choosing a\nlocation](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.2.1-if-statement-locations-continued.png){:\n.shadow}\n\n\n```cpp\n    // Check the user's choice and display the corresponding messages\n    if (choice == 1) {\n        std::cout \u003C\u003C \"You chose Moonlight Markets\" \u003C\u003C std::endl;\n    }\n    else if (choice == 2) {\n        std::cout \u003C\u003C \"You chose Grand Library\" \u003C\u003C std::endl;\n    }\n    else if (choice == 3) {\n        std::cout \u003C\u003C \"You chose Shimmer Lake\" \u003C\u003C std::endl;\n    }\n    else {\n        std::cout \u003C\u003C \"Invalid choice\" \u003C\u003C std::endl;\n    }\n```\n\n\n`if/else` is a conditional statement that allows a program to execute code\nbased on whether a condition, in this case the player's choice, is true or\nfalse. 
If the condition evaluates to true, the code inside the braces is\nexecuted.\n\n\n* `if (condition)`: used to check if the condition is true.\n\n* `else if (another condition)`: if the previous condition isn't true, the\nprograms checks this condition.\n\n* `else`: if none of the previous conditions are true.\n\n\nAnother way of managing multiple choices like this example is using a\n`switch()` statement. A `switch` statement allows our program to jump to\ndifferent sections of code based on the value of an expression, which, in\nthis case, is the value of `choice`.\n\n\nWe are going to replace our `if/else` statements with a `switch` statement.\nYou can comment out or delete the `if/else` statements and prompt Code\nSuggestions starting with `switch(choice) {`.\n\n\n![adventure.cpp - Code Suggestions helps the user handle the switch\nstatement for the\nlocations](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.3-conditional-switch-locations.png){:\n.shadow}\n\n\n![adventure.cpp - Code Suggestions helps the user handle the switch\nstatement for the\nlocations](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.3.1-conditional-switch-locations-continued.png){:\n.shadow}\n\n\n```cpp\n    // Evaluate the player's decision\n    switch(choice) {\n        // If 'choice' is 1, this block is executed.\n        case 1:\n            std::cout \u003C\u003C \"You chose Moonlight Markets.\" \u003C\u003C std::endl;\n            break;\n        // If 'choice' is 2, this block is executed.\n        case 2:\n            std::cout \u003C\u003C \"You chose Grand Library.\" \u003C\u003C std::endl;\n            break;\n        // If 'choice' is 3, this block is executed.\n        case 3:\n            std::cout \u003C\u003C \"You chose Shimmer Lake.\" \u003C\u003C std::endl;\n            break;\n        // If 'choice' is not 1, 2, or 3, this block is 
executed.\n        default:\n            std::cout \u003C\u003C \"You did not enter 1, 2, or 3.\" \u003C\u003C std::endl;\n    }\n```\n\n\nEach case represents a potential value that the variable or expression being\nswitched on (in this case, choice) could have. If a match is found, the code\nfor that case is executed. We use the `default` case to handle any input\nerrors in case the player enters a value that isn't accounted for.\n\n\nLet's build out what happens when our player visits the Shimmering Lake.\nI've added some comments after the player's arrival at Shimmering Lake to\nprompt Code Suggestions to help us build this out:\n\n\n```cpp\n    // If 'choice' is 3, this block is executed.\n    case 3:\n        std::cout \u003C\u003C \"You chose Shimmering Lake.\" \u003C\u003C std::endl;\n        // The player arrives at Shimmering Lake. It is one of the most beautiful lakes the player has ever seen.\n        // The player hears a mysterious melody from the water.\n        // They can either 1. Stay quiet and listen, or 2. Sing along with the melody.\n\n        break;\n```\n\n\nNow, if you start writing `std::cout` to begin offering the player this new\ndecision point, Code Suggestions will help fill out the output code.\n\n\n![adventure.cpp - Code Suggestions helps fill out the output code based on\nthe comments about the interaction at the\nLake](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.4-case-3-output.png){:\n.shadow}\n\n\nYou might find that the code provided by Code Suggestions is very\ndeclarative. Once I've accepted the suggestion, I personalize the code as\nneeded. 
For example in this case, including the melody the player heard and\nusing the player's name instead of \"you\":\n\n\n![adventure.cpp - I added the playerName to the output and then prompted\nCode Suggestions to continue the narrative based on the comments\nabove](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.4.1-customizing-output.png){:\n.shadow}\n\n\nI also wanted Code Suggestions to offer suggestions in a specific format, so\nI added an end line:\n\n\n![adventure.cpp - I added an end line to prompt Code Suggestions to break\nthe choices into end line\noutputs](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.4.2-customizing-output-endline.png){:\n.shadow}\n\n\n![adventure.cpp - I added an endline to prompt Code Suggestions to break the\nchoices into end line\noutputs](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.4.3-sub-choices-output.png){:\n.shadow}\n\n\nNow, we'd like to offer our player a nested choice in this scenario. Before\nwe can define the new choices, we need a variable to store this nested\nchoice. Let's define a new variable `int nestedChoice` in our `main()`\nfunction, outside of the `switch()` statement we set up. 
You can put it\nafter our definition of the `choice` variable.\n\n\n```cpp\n    // Declare an int variable to capture the user's choice\n    int choice;\n    // Declare an int variable to capture the user's nested choice\n    int nestedChoice;\n```\n\n\nNext, returning to the `if/else` statement we were working on in `case 3`,\nwe want to prompt the player for their decision and save it in\n`nestedChoice`.\n\n\n![adventure.cpp - I added an end line to prompt Code Suggestions to break\nthe choices into end line\noutputs](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.4.4-capture-nested-choice.png){:\n.shadow}\n\n\nAs you can see, Code Suggestions wants to go ahead and handle the user's\nchoice using another `switch` statement. I would prefer to use an `if/else`\nstatement to handle this decision point.\n\n\nFirst, let's add some comments to give context:\n\n\n```cpp\n    // Capture the user's nested choice\n    std::cin >> nestedChoice;\n\n    // If the player chooses 1 and remains silent, they hear whispers of the merfolk below, but nothing happens.\n    // If the player chooses 2 and sings along, a merfolk surfaces and gifts them a special blue gem as a token of appreciation for their voice.\n\n    // Evaluate the user's nestedChoice\n```\n\n\nThen, start typing `if (nestedChoice == 1)` and Code Suggestions will start\nto offer suggestions:\n\n\n![adventure.cpp - Code Suggestions starts to build out an if statement to\nhandle the\nnestedChoice](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.5-nested-choice-if.png){:\n.shadow}\n\n\nIf you tab to accept them, Code Suggestions will continue to fill out the\nrest of the nested `if/else` statements.\n\n\nSometimes, while you're customizing the suggestions that Code Suggestions\ngives, you may even discover that it would like to make creative\nsuggestions, 
too!\n\n\n![adventure.cpp - Code Suggestions makes a creative suggestion to end the\ninteraction with the merfolk by saying \"You are now free to go\" after you\nreceive the\ngem.](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.5.2-nested-cs-creative-suggestion.png){:\n.shadow}\n\n\nHere's the code for `case 3` for the player's interaction at Shimmering Lake\nwith the nested decision. I've updated some of the narrative dialogue\nplayer's name.\n\n```\n    // Handle the Shimmering Lake scenario.\n    case 3:\n        std::cout \u003C\u003C playerName \u003C\u003C \" arrives at Shimmering Lake. It is one of the most beautiful lakes that\" \u003C\u003C playerName \u003C\u003C \" has seen. They hear a mysterious melody from the water. They can either: \" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"1. Stay quiet and listen\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"2. Sing along with the melody\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"Please enter your choice: \";\n\n        // Capture the user's nested choice\n        std::cin >> nestedChoice;\n\n        // If the player chooses to remain silent\n        if (nestedChoice == 1)\n        {\n            std::cout \u003C\u003C \"Remaining silent, \" \u003C\u003C playerName \u003C\u003C \" hears whispers of the merfolk below, but nothing happens.\" \u003C\u003C std::endl;\n        }\n        // If the player chooses to sing along with the melody\n        else if (nestedChoice == 2)\n        {\n            std::cout \u003C\u003C \"Singing along, a merfolk surfaces and gifts \" \u003C\u003C playerName\n                    \u003C\u003C \" a special blue gem as a token of appreciation for their voice.\"\n                    \u003C\u003C std::endl;\n        }\n        break;\n```\n\n\nOur player isn't limited to just exploring Shimmering Lake. 
There's a whole\nrealm to explore and they might want to go back and explore other locations.\n\n\nTo facilitate this, we can use a `while` loop. A loop is a type of\nconditional that allows a specific section of code to be executed multiple\ntimes based on a condition. For the `condition` that allows our `while` loop\nto run multiple times, let's use a `boolean` to initialize the loop\ncondition.\n\n\n```cpp\n    // Initialize a flag to control the loop and signify the player's intent to explore.\n    bool exploring = true;\n    // As long as the player wishes to keep exploring, this loop will run.\n    while(exploring) {\n        // wrap the code for switch(choice)\n    }\n```\n\n\nWe also need to move our location prompt inside the `while` loop so that the\nplayer can visit more than one location at the time.\n\n\n![adventure.cpp - CS helps us write a go next prompt for the\nlocations](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.6-while-loop-go-next.png){:\n.shadow}\n\n\n```cpp\n    // Initialize a flag to control the loop and signify the player's intent to explore.\n    bool exploring = true;\n    // As long as the player wishes to keep exploring, this loop will run.\n    while(exploring) {\n\n        // If still exploring, ask the player where they want to go next\n        std::cout \u003C\u003C \"Where will \" \u003C\u003C playerName \u003C\u003C \" go next?\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"1. Moonlight Markets\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"2. Grand Library\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"3. 
Shimmering Lake\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"Please enter your choice: \";\n        // Update value of choice\n        std::cin >> choice;\n\n        // Respond based on the player's main choice\n        switch(choice) {\n```\n\n\nOur `while` loop will keep running as long as `exploring` is `true`, so we\nneed a way for the player to have the option to exit the game. Let's add a\ncase 4 that allows the player to exit by setting `exploring = false`. This\nwill exit the loop and take the player back to the original choices.\n\n\n```cpp\n    // Option to exit the game\n    case 4:\n        exploring = false;\n        break;\n```\n\n\n**Async exercise**: Give the player the option to exit the game instead of\nexploring a new decision.\n\n\nWe also need to update the error handling for invalid inputs in the `switch`\nstatement. You can decide whether to end the program or use the `continue`\nstatement to start a new loop iteration.\n\n\n```cpp\n        default:\n            std::cout \u003C\u003C \"You did not enter a valid choice.\" \u003C\u003C std::endl;\n            continue; // Errors continue with the next loop iteration\n```\n\n\nUsing I/O and conditionals is at the core of text-based adventure games and\nhelps make these games interactive. 
We can combine user input, display\noutput, and implement our narrative into decision-making logic to create an\nengaging experience.\n\n\nHere's what our `adventure.cpp` looks like now with some comments:\n\n\n```cpp\n\n#include \u003Ciostream> // Include the I/O stream library for input and output\n\n\n// Main function, the starting point of the program\n\nint main()\n\n{\n    std::cout \u003C\u003C \"Welcome to the Dragon Realm!\" \u003C\u003C std::endl;\n\n    // Declare a string variable to store the player name\n    std::string playerName;\n\n    // Prompt the user to enter their player name\n    std::cout \u003C\u003C \"Please enter your name: \";\n    std::cin >> playerName;\n\n    // Display a personalized welcome message to the player with their name\n    std::cout \u003C\u003C \"Welcome \" \u003C\u003C playerName \u003C\u003C \" to The Dragon Realm!\" \u003C\u003C std::endl;\n\n    // Declare an int variable to capture the user's choice\n    int choice;\n    // Declare an int variable to capture the user's nested choice\n    int nestedChoice;\n\n    // Initialize a flag to control the loop and signify the player's intent to explore.\n    bool exploring = true;\n    // As long as the player wishes to keep exploring, this loop will run.\n    while(exploring) {\n\n        // If still exploring, ask the player where they want to go next\n        std::cout \u003C\u003C \"Where will \" \u003C\u003C playerName \u003C\u003C \" go next?\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"1. Moonlight Markets\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"2. Grand Library\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"3. 
Shimmering Lake\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"Please enter your choice: \";\n        // Update value of choice\n        std::cin >> choice;\n\n        // Respond based on the player's main choice\n        switch(choice) {\n            //  Handle the Moonlight Markets scenario\n            case 1:\n                std::cout \u003C\u003C \"You chose Moonlight Markets.\" \u003C\u003C std::endl;\n                break;\n            // Handle the Grand Library scenario.\n            case 2:\n                std::cout \u003C\u003C \"You chose Grand Library.\" \u003C\u003C std::endl;\n                break;\n            // Handle the Shimmering Lake scenario.\n            case 3:\n                std::cout \u003C\u003C playerName \u003C\u003C \" arrives at Shimmering Lake. It is one of the most beautiful lakes that\" \u003C\u003C playerName \u003C\u003C \" has seen. They hear a mysterious melody from the water. They can either: \" \u003C\u003C std::endl;\n                std::cout \u003C\u003C \"1. Stay quiet and listen\" \u003C\u003C std::endl;\n                std::cout \u003C\u003C \"2. 
Sing along with the melody\" \u003C\u003C std::endl;\n                std::cout \u003C\u003C \"Please enter your choice: \";\n\n                // Capture the user's nested choice\n                std::cin >> nestedChoice;\n\n                // If the player chooses to remain silent\n                if (nestedChoice == 1)\n                {\n                    std::cout \u003C\u003C \"Remaining silent, \" \u003C\u003C playerName \u003C\u003C \" hears whispers of the merfolk below, but nothing happens.\" \u003C\u003C std::endl;\n                }\n                // If the player chooses to sing along with the melody\n                else if (nestedChoice == 2)\n                {\n                    std::cout \u003C\u003C \"Singing along, a merfolk surfaces and gifts \" \u003C\u003C playerName\n                            \u003C\u003C \" a special blue gem as a token of appreciation for their voice.\"\n                            \u003C\u003C std::endl;\n                }\n                break;\n            // Option to exit the game\n            case 4:\n                exploring = false;\n                break;\n            // If 'choice' is not 1, 2, or 3, this block is executed.\n            default:\n                std::cout \u003C\u003C \"You did not enter a valid choice.\" \u003C\u003C std::endl;\n                continue; // Errors continue with the next loop iteration\n        }\n    }\n\n    // Return 0 to indicate successful execution\n    return 0;\n}\n\n```\n\n\nHere's what the build output looks like if we run `adventure.cpp` and the\nplayer heads to the Shimmering Lake.\n\n\n![adventure.cpp build output - the player is called sugaroverflow and heads\nto the Shimmering Lake and receives a\ngem](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/3.6.1-full-case-3-output.png){:\n.shadow}\n\n\n## Structuring the narrative: Characters\n\nOur player can now explore the world. 
Soon, our player will also be able to\nmeet people and collect objects. Before we can do that, let's organize the\nthings our player can do with creating some structure for the player\ncharacter.\n\n\nIn C++, a `struct` is used to group different data types. It's helpful in\ncreating a group of items that belong together, such as our player's\nattributes and inventory, into a single unit. `struct` objects are defined\nglobally, which means at top the file, before the `main() function.\n\n\nIf you start typing `struct Player {`, Code Suggestions will help you out\nwith a sample definition of a player struct.\n\n\n![adventure.cpp - Code Suggestions helps with setting up the struct\ndefinition for the\nplayer](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/4-player-struct-definition.png){:\n.shadow}\n\n\nAfter accepting this suggestion, you might find that Code Suggestions is\neager to define some functions to make this game more fun, such as hunting\nfor treasure.\n\n\n![adventure.cpp - Code Suggestions provides a suggestion for creating\nfunctions to hunt for\ntreasure.](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/4.1-player-struct-treasure-suggestion.png){:\n.shadow}\n\n\n```cpp\n\n// Define a structure for a Player in the game.\n\nstruct Player{\n    std::string name;  // The name of the player.\n    int health;        // The current health of the player.\n    int xp;            // Experience points gained by the player. 
Could be used for leveling up or other game mechanics.\n};\n\n```\n\n\nGiving the player experience points was not in my original plan for this\ntext adventure game, but Code Suggestions makes an interesting suggestion.\nWe could use `xp` for leveling up or for other game mechanics as our project\ngrows.\n\n\n`struct Player` provides a blueprint for creating a player and details the\nattributes that make up a player. To use our player in our code, we must\ninstantiate, or create, an object of the `Player` struct within our `main()`\nfunction. Objects in C++ are instances of structures that contain\nattributes. In our example, we're working with the `Player` struct, which\nhas attributes like name, health, and xp.\n\n\nAs you're creating a `Player` object, you might find that Code Suggestions\nwants to name the player \"John.\"\n\n\n![adventure.cpp - code suggestions suggests naming the new Player object\nJohn.](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/4.2-player-struct-instance-john.png){:\n.shadow}\n\n\n```cpp\n\nint main() {\n    // Create an instance of the Player struct\n    Player player;\n    player.health = 100; // Assign a default value for HP\n```\n\n\nInstead of naming our player \"John\" for everyone, we'll use the `Player`\nobject to set the attribute for name. When we want to interact with or\nmanipulate an attribute of an object, we use the dot operator `.`. The dot\noperator allows us to access specific members of the object. 
We can set the\nplayer's name using the dot operator with `player.name`.\n\n\nNote that we need to replace other mentions of `playerName` the variable\nwith `player.name`, which allows us to access the player object's name\ndirectly.\n\n\n* Search for all occurrences of the `playerName` variable, and replace it\nwith `player.name`.\n\n* Comment/Remove the unused `std::string playerName` variable after that.\n\n\nWhat your `adventure.cpp` will look like now:\n\n\n```cpp\n\n#include \u003Ciostream> // Include the I/O stream library for input and output\n\n\n// Define a structure for a Player in the game.\n\nstruct Player{\n    std::string name;  // The name of the player.\n    int health;        // The current health of the player.\n    int xp;            // Experience points gained by the player. Could be used for leveling up or other game mechanics.\n};\n\n\n// Main function, the starting point of the program\n\nint main()\n\n{\n    std::cout \u003C\u003C \"Welcome to the Dragon Realm!\" \u003C\u003C std::endl;\n\n    // Create an instance of the Player struct\n    Player player;\n    player.health = 100; // Assign a default value for HP\n\n    // Prompt the user to enter their player name\n    std::cout \u003C\u003C \"Please enter your name: \";\n    std::cin >> player.name;\n\n    // Display a personalized welcome message to the player with their name\n    std::cout \u003C\u003C \"Welcome \" \u003C\u003C player.name \u003C\u003C \" to The Dragon Realm!\" \u003C\u003C std::endl;\n\n    // Declare an int variable to capture the user's choice\n    int choice;\n    // Declare an int variable to capture the user's nested choice\n    int nestedChoice;\n\n    // Initialize a flag to control the loop and signify the player's intent to explore.\n    bool exploring = true;\n    // As long as the player wishes to keep exploring, this loop will run.\n    while(exploring) {\n\n        // If still exploring, ask the player where they want to go next\n        std::cout 
\u003C\u003C \"Where will \" \u003C\u003C player.name \u003C\u003C \" go next?\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"1. Moonlight Markets\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"2. Grand Library\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"3. Shimmering Lake\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"Please enter your choice: \";\n        // Update value of choice\n        std::cin >> choice;\n\n        // Respond based on the player's main choice\n        switch(choice) {\n            //  Handle the Moonlight Markets scenario\n            case 1:\n                std::cout \u003C\u003C \"You chose Moonlight Markets.\" \u003C\u003C std::endl;\n                break;\n            // Handle the Grand Library scenario.\n            case 2:\n                std::cout \u003C\u003C \"You chose Grand Library.\" \u003C\u003C std::endl;\n                break;\n            // Handle the Shimmering Lake scenario.\n            case 3:\n                std::cout \u003C\u003C player.name \u003C\u003C \" arrives at Shimmering Lake. It is one of the most beautiful lakes that\" \u003C\u003C player.name \u003C\u003C \" has seen. They hear a mysterious melody from the water. They can either: \" \u003C\u003C std::endl;\n                std::cout \u003C\u003C \"1. Stay quiet and listen\" \u003C\u003C std::endl;\n                std::cout \u003C\u003C \"2. 
Sing along with the melody\" \u003C\u003C std::endl;\n                std::cout \u003C\u003C \"Please enter your choice: \";\n\n                // Capture the user's nested choice\n                std::cin >> nestedChoice;\n\n                // If the player chooses to remain silent\n                if (nestedChoice == 1)\n                {\n                    std::cout \u003C\u003C \"Remaining silent, \" \u003C\u003C player.name \u003C\u003C \" hears whispers of the merfolk below, but nothing happens.\" \u003C\u003C std::endl;\n                }\n                // If the player chooses to sing along with the melody\n                else if (nestedChoice == 2)\n                {\n                    std::cout \u003C\u003C \"Singing along, a merfolk surfaces and gifts \" \u003C\u003C player.name\n                            \u003C\u003C \" a special blue gem as a token of appreciation for their voice.\"\n                            \u003C\u003C std::endl;\n                }\n                break;\n            // Option to exit the game\n            case 4:\n                exploring = false;\n                break;\n            // If 'choice' is not 1, 2, or 3, this block is executed.\n            default:\n                std::cout \u003C\u003C \"You did not enter a valid choice.\" \u003C\u003C std::endl;\n                continue; // Errors continue with the next loop iteration\n        }\n    }\n\n    // Return 0 to indicate successful execution\n    return 0;\n}\n\n```\n\n\n## Structuring the narrative: Items\n\nAn essential part of adventure games is a player's inventory - the\ncollection of items they acquire and use during their journey. For example,\nat Shimmering Lake, the player acquired a blue gem.\n\n\nLet's update our Player `struct` to include an inventory using an array. In\nC++, an `array` is a collection of elements of the same type that can be\nidentified by an index. When creating an array, you need to specify its type\nand size. 
Start by adding `std::string inventory` to the Player `struct`:\n\n\n![adventure.cpp - Code Suggestions shows us how to add an array of strings\nto the player struct to use as the players\ninventory](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/5-add-inventory-player-struct.png){:\n.shadow}\n\n\nYou might find that Code Suggestions wants our player to be able to carry\nsome gold, but we don't need that for now. Let's also add `int\ninventoryCount;` to keep track of the number of items in our player's\ninventory.\n\n\n![adventure.cpp - Code Suggestions shows us how to add an integer for\ninventoryCount to the player\nstruct](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/5.1-add-inventory-count-player-struct.png){:\n.shadow}\n\n\n```cpp\n\n// Define a structure for a Player in the game.\n\nstruct Player{\n    std::string name;  // The name of the player.\n    int health;        // The current health of the player.\n    int xp;            // Experience points gained by the player. Could be used for leveling up or other game mechanics.\n    std::string inventory[10];  // An array of strings for the player's inventory.\n    int inventoryCount = 0;  // The number of items in the player's inventory.\n};\n\n```\n\nIn our Player `struct`, we have defined an array for our inventory that can\nhold the names of 10 items (type:string, size: 10). As the player progresses\nthrough our story, we can assign new items to the inventory array based on\nthe player's actions using the array index.\n\n\nSometimes Code Suggestions gets ahead of me and tries to add more complexity\nto the game by suggesting that we need to create a `struct` for some\nMonsters. 
Maybe later, Code Suggestions!\n\n\n![adventure.cpp - Code Suggestions wants to add a struct for Monsters we can\nbattle](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/5.2-suggestion-gets-distracted-by-monsters.png\n\n){: .shadow}\n\n\nBack at the Shimmering Lake, the player received a special blue gem from the\nmerfolk. Let's update the code in `case 2` for the Shimmering Lake to add\nthe gem to our player's inventory.\n\n\nYou can start by accessing the player's inventory with `player.inventory`\nand Code Suggestions will help add the gem.\n\n\n![adventure.cpp - Code Suggestions shows us how to add a gem to the player's\ninventory using a post-increment operation and the inventory array from the\nstruct\nobject](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/5.3-add-gem-to-inventory.png){:\n.shadow}\n\n\n```cpp\n    // If the player chooses to sing along with the melody\n    else if (nestedChoice == 2)\n    {\n        std::cout \u003C\u003C \"Singing along, a merfolk surfaces and gifts \" \u003C\u003C player.name\n                \u003C\u003C \" a special blue gem as a token of appreciation for their voice.\"\n                \u003C\u003C std::endl;\n        player.inventory[player.inventoryCount] = \"Blue Gem\";\n        player.inventoryCount++;\n    }\n```\n\n\n* `player.inventory`: accesses the inventory attribute of the player object\n\n* `player.inventoryCount`: accesses the integer that keeps track of how many\nitems are currently in the player's inventory. This also represents the next\navailable index in our inventory array where an item can be stored.\n\n* `player.inventoryCount++`: increments the value of inventoryCount by 1.\nThis is a post-increment operation. 
We are adding “Blue Gem” to the next\navailable slot in the inventory array and incrementing the array for the\nnewly added item.\n\n\nOnce we've added something to our player's inventory, we may also want to be\nable to look at everything in the inventory. We can use a `for` loop to\niterate over the inventory array and display each item.\n\n\nIn C++, a `for` loop allows code to be repeatedly executed a specific number\nof times. It's different from the `while` loop we used earlier because the\n`while` executes its body based on a condition, whereas a `for` loop\niterates over a sequence or range, usually with a known number of times.\n\n\nAfter adding the gem to the player's inventory, let's display all the items\nit has. Try starting a for loop with `for ( ` to display the player's\ninventory and Code Suggestions will help you with the syntax.\n\n\n![adventure.cpp - Code Suggestions demonstrates how to write a for loop to\nloop through the players\ninventory](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/5.4-loop-over-players-inventory.png){:\n.shadow}\n\n\n```cpp\n\nstd::cout \u003C\u003C player.name \u003C\u003C \"'s Inventory:\" \u003C\u003C std::endl;\n\n// Loop through the player's inventory up to the count of items they have\n\nfor (int i = 0; i \u003C player.inventoryCount; i++)\n\n{\n    // Output the item in the inventory slot\n    std::cout \u003C\u003C \"- \" \u003C\u003C player.inventory[i] \u003C\u003C std::endl;\n}\n\n```\n\n\nA `for` loop consists of 3 main parts:\n\n\n* `int i = 0`: is the initialization where you set up your loop variable.\nHere, we start counting from 0.\n\n* `i \u003C player.inventoryCount`: is the condition we're looping on, our loop\nchecks if `i`, the current loop variable, is less than the number of items\nin our inventory. It will keep going until this is true.\n\n* `i++`: is the iteration. 
This updates the loop variable each time the loop\nruns.\n\n\nTo make sure that our loop doesn't encounter an error, let's add some error\nhandling to make sure the inventory is not empty when we try to output it.\n\n\n```\n\nstd::cout \u003C\u003C player.name \u003C\u003C \"'s Inventory:\" \u003C\u003C std::endl;\n\n// Loop through the player's inventory up to the count of items they have\n\nfor (int i = 0; i \u003C player.inventoryCount; i++)\n\n{\n    // Check if the inventory slot is not empty.\n    if (!player.inventory[i].empty())\n    {\n        // Output the item in the inventory slot\n        std::cout \u003C\u003C \"- \" \u003C\u003C player.inventory[i] \u003C\u003C std::endl;\n    }\n}\n\n```\n\n\nWith our progress so far, we've successfully established a persistent\n`while` loop for our adventure, handled decisions, crafted a `struct` for\nour player, and implemented a simple inventory system. Now, let's dive into\nthe next scenario, the Grand Library, applying the foundations we've\nlearned.\n\n\n**Async exercise**: Add more inventory items found in different locations.\n\n\nHere's what we have for `adventure.cpp` so far:\n\n\n```cpp\n\n#include \u003Ciostream> // Include the I/O stream library for input and output\n\n\n// Define a structure for a Player in the game.\n\nstruct Player{\n    std::string name;  // The name of the player.\n    int health;        // The current health of the player.\n    int xp;            // Experience points gained by the player. 
Could be used for leveling up or other game mechanics.\n    std::string inventory[10];  // An array of strings for the player's inventory.\n    int inventoryCount = 0;\n};\n\n\n// Main function, the starting point of the program\n\nint main()\n\n{\n    std::cout \u003C\u003C \"Welcome to the Dragon Realm!\" \u003C\u003C std::endl;\n\n    // Create an instance of the Player struct\n    Player player;\n    player.health = 100; // Assign a default value for HP\n\n    // Prompt the user to enter their player name\n    std::cout \u003C\u003C \"Please enter your name: \";\n    std::cin >> player.name;\n\n    // Display a personalized welcome message to the player with their name\n    std::cout \u003C\u003C \"Welcome \" \u003C\u003C player.name \u003C\u003C \" to The Dragon Realm!\" \u003C\u003C std::endl;\n\n    // Declare an int variable to capture the user's choice\n    int choice;\n    // Declare an int variable to capture the user's nested choice\n    int nestedChoice;\n\n    // Initialize a flag to control the loop and signify the player's intent to explore.\n    bool exploring = true;\n    // As long as the player wishes to keep exploring, this loop will run.\n    while(exploring) {\n\n        // If still exploring, ask the player where they want to go next\n        std::cout \u003C\u003C \"--------------------------------------------------------\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"Where will \" \u003C\u003C player.name \u003C\u003C \" go next?\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"1. Moonlight Markets\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"2. Grand Library\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"3. 
Shimmering Lake\" \u003C\u003C std::endl;\n        std::cout \u003C\u003C \"Please enter your choice: \";\n        // Update value of choice\n        std::cin >> choice;\n\n        // Respond based on the player's main choice\n        switch(choice) {\n            //  Handle the Moonlight Markets scenario\n            case 1:\n                std::cout \u003C\u003C \"You chose Moonlight Markets.\" \u003C\u003C std::endl;\n                break;\n            // Handle the Grand Library scenario.\n            case 2:\n                std::cout \u003C\u003C \"You chose Grand Library.\" \u003C\u003C std::endl;\n                break;\n            // Handle the Shimmering Lake scenario.\n            case 3:\n                std::cout \u003C\u003C player.name \u003C\u003C \" arrives at Shimmering Lake. It is one of the most beautiful lakes that\" \u003C\u003C player.name \u003C\u003C \" has seen. They hear a mysterious melody from the water. They can either: \" \u003C\u003C std::endl;\n                std::cout \u003C\u003C \"1. Stay quiet and listen\" \u003C\u003C std::endl;\n                std::cout \u003C\u003C \"2. 
Sing along with the melody\" \u003C\u003C std::endl;\n                std::cout \u003C\u003C \"Please enter your choice: \";\n\n                // Capture the user's nested choice\n                std::cin >> nestedChoice;\n\n                // If the player chooses to remain silent\n                if (nestedChoice == 1)\n                {\n                    std::cout \u003C\u003C \"Remaining silent, \" \u003C\u003C player.name \u003C\u003C \" hears whispers of the merfolk below, but nothing happens.\" \u003C\u003C std::endl;\n                }\n                // If the player chooses to sing along with the melody\n                else if (nestedChoice == 2)\n                {\n                    std::cout \u003C\u003C \"Singing along, a merfolk surfaces and gifts \" \u003C\u003C player.name\n                            \u003C\u003C \" a special blue gem as a token of appreciation for their voice.\"\n                            \u003C\u003C std::endl;\n                    player.inventory[player.inventoryCount] = \"Blue Gem\";\n                    player.inventoryCount++;\n\n                    std::cout \u003C\u003C player.name \u003C\u003C \"'s Inventory:\" \u003C\u003C std::endl;\n                    // Loop through the player's inventory up to the count of items they have\n                    for (int i = 0; i \u003C player.inventoryCount; i++)\n                    {\n                        // Check if the inventory slot is not empty.\n                        if (!player.inventory[i].empty())\n                        {\n                            // Output the item in the inventory slot\n                            std::cout \u003C\u003C \"- \" \u003C\u003C player.inventory[i] \u003C\u003C std::endl;\n                        }\n                    }\n\n                }\n                break;\n            // Option to exit the game\n            case 4:\n                exploring = false;\n                break;\n            // If 'choice' is not 1, 2, 
or 3, this block is executed.\n            default:\n                std::cout \u003C\u003C \"You did not enter a valid choice.\" \u003C\u003C std::endl;\n                continue; // Errors continue with the next loop iteration\n        }\n    }\n\n    // Return 0 to indicate successful execution\n    return 0;\n}\n\n```\n\n\n![adventure.cpp - A full output of the game at the current state - our\nplayer sugaroverflow visits the Lake, receives the gem, adds it to their\ninventory, and we display the inventory before returning to the\nloop](https://about.gitlab.com/images/blogimages/2023-08-21-building-a-text-adventure-using-cplusplus-and-code-suggestions/5.5-full-output-shimmering-lake.png){:\n.shadow}\n",[9,803,888,951,719],{"slug":1157,"featured":6,"template":698},"building-a-text-adventure-using-cplusplus-and-code-suggestions","content:en-us:blog:building-a-text-adventure-using-cplusplus-and-code-suggestions.yml","Building A Text Adventure Using Cplusplus And Code Suggestions","en-us/blog/building-a-text-adventure-using-cplusplus-and-code-suggestions.yml","en-us/blog/building-a-text-adventure-using-cplusplus-and-code-suggestions",{"_path":1163,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1164,"content":1170,"config":1178,"_id":1180,"_type":14,"title":1181,"_source":16,"_file":1182,"_stem":1183,"_extension":19},"/en-us/blog/building-gitlab-with-gitlabcom-how-gitlab-inspired-dedicated",{"title":1165,"description":1166,"ogTitle":1165,"ogDescription":1166,"noIndex":6,"ogImage":1167,"ogUrl":1168,"ogSiteName":685,"ogType":686,"canonicalUrls":1168,"schema":1169},"Building GitLab with GitLab: How GitLab.com inspired Dedicated","Learn how the multi-tenancy SaaS solution, GitLab.com, influenced the design of the single-tenancy SaaS, GitLab 
Dedicated.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659740/Blog/Hero%20Images/building-gitlab-with-gitlab-no-type.png","https://about.gitlab.com/blog/building-gitlab-with-gitlabcom-how-gitlab-inspired-dedicated","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Building GitLab with GitLab: How GitLab.com inspired Dedicated\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Andrew Newdigate\"},{\"@type\":\"Person\",\"name\":\"Craig Miskell\"},{\"@type\":\"Person\",\"name\":\"John Coghlan\"}],\n        \"datePublished\": \"2023-08-03\",\n      }",{"title":1165,"description":1166,"authors":1171,"heroImage":1167,"date":1175,"body":1176,"category":928,"tags":1177},[1172,1173,1174],"Andrew Newdigate","Craig Miskell","John Coghlan","2023-08-03","\nEarlier this year, we announced [the general availability of GitLab Dedicated](https://about.gitlab.com/blog/gitlab-dedicated-available/), our single-tenancy software-as-a-service (SaaS) offering. Dedicated, which addresses the needs of customers with stringent compliance requirements while maintaining speed, efficiency, and security, was developed from the lessons we learned building and using GitLab.com, our multi-tenancy model. Although there is overlap in how we manage both platforms, such as the same service-level monitoring stack, there were significant considerations that sparked the need for new design decisions, including how we approach automation, databases, monitoring, and availability. In this blog, we share some of those decision points and their outcomes.\n\n## GitLab platform options\nBefore we dive into the evolution of GitLab Dedicated, let’s level-set on GitLab’s [portfolio of platform models](https://docs.gitlab.com/ee/subscriptions/choosing_subscription.html#choose-a-subscription):\n- GitLab.com, a.k.a. 
multi-tenant GitLab SaaS on our pricing page and in our documentation\n- GitLab Dedicated, single-tenant SaaS that satisfies compliance requirements such as data residency, isolation, and private networking\n- GitLab self-managed, in which customers install, administer, and maintain their own GitLab instance\n\nEach method meets the different needs of our wide range of customers and requires a unique approach for how we create, package, and deploy the application.\n\nWhile both GitLab.com and Dedicated are SaaS-based, there are key differences between the two. The multi-tenant GitLab.com is the largest hosted instance of GitLab and services thousands of customers and millions of users. Because the platform's reliability is critical to so many customers and because of the iterative nature of how GitLab.com was built, decisions have been made along the way that are unique to the scale of this specific instance.\n\nIn contrast, GitLab Dedicated is a single-tenant SaaS application that is hosted by GitLab in the customer's region of choice (GitLab.com is hosted in the U.S.). While still providing a GitLab-managed SaaS solution for our customers, Dedicated instances are fully isolated from one another, running on a platform that automates the configuration and provisioning of the instances, along with automating as many of the day-two operations as possible, such as maintenance, monitoring, and optimization.\n\nHere are some examples of how Dedicated has used the blueprint of GitLab.com.\n\n## Improved automated deployments\nGitLab.com is a permanent installation with a great deal of history, having evolved significantly since it was first developed. Originally, it was deployed on a single instance in Amazon AWS, before migrating to Microsoft Azure, where it continued to scale out. From Azure, it migrated to its current cloud, Google Cloud Platform. 
Since then, many customer workloads have [migrated into Kubernetes](https://about.gitlab.com/blog/year-of-kubernetes/) and are supported by the Google Kubernetes Engine ([GKE](https://cloud.google.com/kubernetes-engine)).\n\nWith GitLab Dedicated, we're building smaller instances that rely on automation, repeatability, and deterministic environments. All customer tenant GitLab instance operations must be 100% automated, including provisioning, upgrades, scaling, configuration changes, and any other routine operations. The stack relies heavily on the GitLab Environment Toolkit ([GET](https://gitlab.com/gitlab-org/gitlab-environment-toolkit/-/blob/main/docs/environment_advanced_hybrid.md)) Cloud Native Hybrid, which uses the GitLab Helm charts for stateless workloads (e.g., Rails) and Omnibus for deployments to VMs (e.g., Gitaly). GET helps with the deployments targeting [reference architectures](https://docs.gitlab.com/ee/administration/reference_architectures/) and coordinating the provisioning of cloud resources, including compute instances, Kubernetes clusters, managed Postgres databases and more.\n\nAs much as GET automates, it has a certain amount of required setup, which is acceptable to perform manually for one-off or otherwise long-lived deployments, but in order to scale Dedicated we also had to automate that process, which we did with Terraform. Because this was a greenfield approach, we were able to be particularly careful with privileges. Our current cloud deployment target is AWS, so we developed a detailed identity and access management ([IAM](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html)) policy to grant each stage of deployment only the strictly necessary access. We also use IAM role assumption from trusted workloads in a central AWS account to eliminate the need for explicit credentials.  
\n\nDeployments follow this process in order:\n- An account creation job running from a trusted location creates a fresh AWS account in an [AWS Organization](https://docs.aws.amazon.com/organizations/index.html), placing it in the correct Organizational Unit to automatically have a [CloudFormation StackSet](https://docs.aws.amazon.com/organizations/latest/userguide/services-that-can-integrate-cloudformation.html) applied, with ongoing updates handled by AWS when needed. This allows us to operate the entire lifecycle of the tenant account using IAM Role Assumption rather than generating and storing static IAM credentials.\n- Prepare stage sets up a fresh AWS account ready to receive a deployment; the privileges are quite high powered, but still limited to the necessary areas, including creating the next role.  \n- Onboard stage creates some high-level resources and otherwise does the setup that GET requires to be able to run, including creating the roles for the next stages with their own limited privileges.  \n- Provision stage is mostly about running GET Terraform and creating the compute and storage resources onto which GitLab will be deployed, with a few additions for our specific needs.  \n- Configure stage runs to deploy the GitLab application onto the resources created earlier. At its core, this is the GET Ansible stage, but it includes our own Terraform wrapper as well to handle our specific needs.\n\nOnce these stages complete, a fully deployed GitLab instance is ready to go.  \n\nConfiguration changes and GitLab upgrades execute the same set of stages, ensuring everything is still configured correctly and applying any pending changes. In the early days of GitLab Dedicated this was done in GitLab CI/CD pipelines operating on GitLab.com, with the tenant descriptions as JSON files in a repository, which was an effective and simple place to start.  
\n\nHowever, this multi-stage deployment is now managed by [Switchboard](https://about.gitlab.com/direction/saas-platforms/switchboard/), a portal we built specifically for GitLab Dedicated. Switchboard is a bespoke Rails application, which will be the single source of truth for configuration, accessible by customers to manage customer-facing settings, as well as GitLab Dedicated staff for general management. Switchboard will be responsible for automating regular upgrades, including gradual rollouts across the fleet of Dedicated instances.\n\n## Databases geared towards the needs of single tenancy\nGitLab.com uses self-managed Postgres and Redis. For GitLab Dedicated, we wanted to leverage AWS’s managed services as much as possible. Examples include RDS, Elasticache, and OpenSearch, the AWS Elasticsearch managed service. Some of these services may not always be able to support GitLab.com-scale platforms, but they handle the traffic of a single-tenant instance well and provide reliable failovers and ongoing maintenance with no effort on our part.\n\n## Monitoring aligned with strict compliance needs\nThe observability stack for GitLab Dedicated relies on the expertise we gained from building GitLab.com. The monitoring, logging, and availability infrastructure is all maintained within the customer's AWS account, nothing is shared. We receive low-context alerts from these private systems. They serve as a mechanism to direct us to the customer account so we can review what is going on and triage the underlying issues if needed. This is helpful with regulators and compliance as nothing can leak because it doesn't leave the system.\n\nWhile Dedicated and GitLab.com share much of the same monitoring stack, Dedicated instances have tended to reveal different issues within our application. This is due to GitLab.com being a multi-tenant instance, while GitLab Dedicated instances are single-tenant. 
\n\nThink of the adage, \"[Your 9s are not my 9s](https://rachelbythebay.com/w/2019/07/15/giant/).\" In a platform at the scale of GitLab.com, a subset of users who encounter an issue in part of the application may be a very small percentage of the overall user base. The small impact relative to the scale of the platform may not create an alert. In a single-tenant instance, however, the same bugs or scaling issues can quickly impact a higher percentage of the overall users of the instance, escalating the issue's importance. Applying our service-level monitoring to single-tenant GitLab instances has benefited GitLab users who had encountered bugs that were overlooked in the volume of GitLab.com usage. When we identify issues in a Dedicated instance, we resolve them within the product.\n\n## High availability for all components\nConsidering the hybrid environment and the level of service that we want to offer to our customers, we have made some minor changes from the [standard reference architecture](https://docs.gitlab.com/ee/administration/reference_architectures/).\n\nOne such change is introducing high availability for all components. For the lower size (i.e., up to 2,000 users), our architecture ships by default with all the components in full redundant mode. Components like RDS and Elasticache will have a replica in a different Availability Zone. This is referred to as the primary region and we have to define how it will look in the [Geo replicas](https://docs.gitlab.com/ee/administration/geo/setup/database.html).\n\n## Only on Dedicated\nIn addition to the other changes we made, we also built some features that are only used for GitLab Dedicated:\n- Bring your own key - customers can provide and manage the encryption keys used to encrypt AWS resources such as storage, allowing a customer to revoke access should that ever become necessary. 
This is not something that can be offered in a multi-tenant system like GitLab.com.\n- Switchboard - as mentioned above, Switchboard was purpose-built for Dedicated. It is a multi-tenant Ruby on Rails application, accessible by GitLab Dedicated customer administrators and GitLab Dedicated team members. Using this interface, customers can change the available application runtime settings, access provided graphs, add additional products, and more. The main Switchboard instance serves as a single source of truth for global configuration and status across multiple cloud providers and regions.\n- PrivateLink networking - allows traffic between tenant AWS accounts and customer accounts without exposing data to the internet. \n- Other network features - including traffic filtering and private hosted zones.\n\nDedicated has been an exciting project and a great learning experience for our team. We were able to apply the knowledge accumulated in building GitLab.com to deliver an important new product for our customers in a very efficient way. 
You can learn more about GitLab Dedicated by visiting our [Dedicated page](https://about.gitlab.com/dedicated/) or contacting a GitLab sales representative.\n\n_Check out the [first installment in our \"Building GitLab with GitLab\" series](https://about.gitlab.com/blog/building-gitlab-with-gitlab-api-fuzzing-workflow/), which takes you behind the scenes of the development of our web API fuzz testing._\n",[805,9,951,695],{"slug":1179,"featured":6,"template":698},"building-gitlab-with-gitlabcom-how-gitlab-inspired-dedicated","content:en-us:blog:building-gitlab-with-gitlabcom-how-gitlab-inspired-dedicated.yml","Building Gitlab With Gitlabcom How Gitlab Inspired Dedicated","en-us/blog/building-gitlab-with-gitlabcom-how-gitlab-inspired-dedicated.yml","en-us/blog/building-gitlab-with-gitlabcom-how-gitlab-inspired-dedicated",{"_path":1185,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1186,"content":1192,"config":1199,"_id":1201,"_type":14,"title":1202,"_source":16,"_file":1203,"_stem":1204,"_extension":19},"/en-us/blog/certificate-based-kubernetes-integration-sunsetting-on-gitlab-com",{"title":1187,"description":1188,"ogTitle":1187,"ogDescription":1188,"noIndex":6,"ogImage":1189,"ogUrl":1190,"ogSiteName":685,"ogType":686,"canonicalUrls":1190,"schema":1191},"Certificate-based Kubernetes integration sunsetting on GitLab.com","Learn how to check if you are impacted by the sunsetting in May 2026 and the steps needed to migrate to our proposed alternatives, including the GitLab agent for Kubernetes.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662245/Blog/Hero%20Images/blog-image-template-1800x945__16_.png","https://about.gitlab.com/blog/certificate-based-kubernetes-integration-sunsetting-on-gitlab-com","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Certificate-based Kubernetes integration sunsetting on GitLab.com\",\n        \"author\": 
[{\"@type\":\"Person\",\"name\":\"Viktor Nagy\"}],\n        \"datePublished\": \"2025-02-17\",\n      }",{"title":1187,"description":1188,"authors":1193,"heroImage":1189,"date":1195,"body":1196,"category":717,"tags":1197,"updatedDate":758},[1194],"Viktor Nagy","2025-02-17","__*Note: In a previously published version of this article, we stated that the certificate-based Kubernetes integration would be sunset in GitLab 18.0 in May 2025. That timeline has been extended to GitLab 19.0, planned for May 2026. See the [deprecation notice](https://docs.gitlab.com/update/deprecations/#gitlab-self-managed-certificate-based-integration-with-kubernetes) for details.*__\n\nThe certificate-based Kubernetes integration was [deprecated in GitLab November 2021](https://about.gitlab.com/blog/deprecating-the-cert-based-kubernetes-integration/), and is available on GitLab.com only to previous users. In May 2026, the integration will sunset on GitLab.com and will stop working. Customers often use the integration to deploy applications to production and non-production environments. As a result, failure to migrate to other options could cause a critical incident in your application delivery pipelines. This post outlines the alternative features that GitLab offers, points out how you can identify the potential impact on your GitLab.com groups and projects, and offers links to the GitLab documentation to learn more about the necessary migration steps.\n\n## Recommended alternative: The GitLab agent for Kubernetes\n\nThe GitLab agent for Kubernetes represents a significant advancement over the certificate-based integration, offering enhanced security, reliability, and functionality. 
Here are the key benefits of migrating to the agent-based approach:\n\n### Enhanced security  \n- Eliminates the need for storing cluster credentials in GitLab  \n- Provides secure, bidirectional communication between GitLab and your clusters  \n- Supports fine-grained access control and authorization policies  \n- Enables secure GitOps workflows with pull-based deployments\n\n### Improved reliability  \n- Maintains persistent connections, reducing deployment failures  \n- Handles network interruptions gracefully  \n- Provides better logging and troubleshooting capabilities  \n- Supports automatic reconnection and state recovery\n\n### Advanced features  \n- Real-time cluster information integrated into the GitLab UI  \n- Integration with GitLab CI/CD pipelines  \n- Support for multiple clusters and multi-tenant environments  \n- Enhanced GitOps capabilities by integrating with FluxCD\n\n## Get started with the GitLab agent for Kubernetes\n\nIf you haven't tried the GitLab Agent for Kubernetes yet, we strongly recommend going through the [getting started guides](https://docs.gitlab.com/ee/user/clusters/agent/getting_started). These guides will walk you through the basic setup and help you understand how the agent works in your environment. The hands-on experience will help make the migration process smoother.\n\n## Impact assessment\n\nWe implemented a [dedicated API](https://docs.gitlab.com/ee/api/cluster_discovery.html) endpoint to query all the certificate-based clusters within a GitLab group hierarchy. We recommend starting with this API to see if you have any clusters that need to be migrated.\n\nOnce you identify the clusters, you should:\n1. Find group and project owners using the certificate-based integration.  \n2. Check CI/CD pipelines for direct Kubernetes API calls.  \n3. Identify Auto DevOps projects using the old integration.  \n4. List any GitLab-managed clusters in use.  \n5. Set up the agent in the affected clusters. \n6. 
Follow the guidance provided in this post and record your progress in a tracking issue.\n\n## Update your CI/CD integration\n\nThe legacy certificate-based integration works using GitLab CI/CD. Because the agent seamlessly integrates with GitLab CI/CD pipelines, you can use it to replace the certificate-based integration with relatively little effort. The agent-based CI/CD integration offers several improvements over the certificate-based approach:\n\n1. **Direct cluster access:** CI/CD jobs can interact with clusters through the agent without requiring separate credentials.  \n2. **Enhanced security:** You don't need to store cluster credentials in CI/CD variables. \n3. **Simplified configuration:** A single agent configuration file manages all cluster interactions.  \n4. **Better performance:** Persistent connections reduce deployment overhead.  \n5. **Flexible authorization:** On GitLab Premium and Ultimate, you can rely on impersonation features to restrict CI/CD jobs in the cluster.\n\nAt a high level, there are three steps to migrating your existing CI/CD pipelines:  \n1. Set up the agent by following [the getting started guides](https://docs.gitlab.com/ee/user/clusters/agent/getting_started).  \n2. [Share the agent connection with the necessary groups and projects.](https://docs.gitlab.com/ee/user/clusters/agent/ci_cd_workflow.html#authorize-the-agent). \n3. [Select the agent in the pipeline jobs.](https://docs.gitlab.com/ee/user/clusters/agent/ci_cd_workflow.html#update-your-gitlab-ciyml-file-to-run-kubectl-commands)\n\nYou can read more about [migrating Kubernetes deployments in general](https://docs.gitlab.com/ee/user/infrastructure/clusters/migrate_to_gitlab_agent.html) or about [the agent CI/CD integration](https://docs.gitlab.com/ee/user/clusters/agent/ci_cd_workflow.html) in the documentation.\n\n## Migrate your Auto DevOps configuration\n\nAuto DevOps is a set of CI/CD templates that are often customized by users. 
With Auto DevOps, you can automatically configure your CI/CD pipelines to build, test, and deploy your applications based on best practices. It's commonly used with the certificate-based integration for deploying applications to Kubernetes clusters. \n\nIf you use Auto DevOps and you rely on the certificate-based integration, you need to transition to the agent-based deployment mechanism. The migration process is straightforward:\n1. Set up the CI/CD integration as described above.  \n2. Configure the `KUBE_CONTEXT` environment variable to select an agent.  \n4. Remove the old certificate-based cluster integration.\n\nYou can read more about [using Auto DevOps with the agent for Kubernetes](https://docs.gitlab.com/ee/user/clusters/agent/ci_cd_workflow.html\\#environments-that-use-auto-devops) in the documentation.\n\n## Transition from GitLab-managed clusters to GitLab-managed Kubernetes resources\n\nWith GitLab-managed clusters, GitLab automatically creates and manages Kubernetes resources for your projects. When you allow GitLab to manage your cluster, it creates RBAC resources like a Namespace and ServiceAccount. \n\nIf you use GitLab-managed clusters, you should transition to GitLab-managed Kubernetes resources, which offers a more flexible and secure approach to cluster management.\n\nTo migrate: \n1. Document your existing cluster configuration.  \n2. Create corresponding Kubernetes resource definitions.  \n3. Store configurations in your repository.  \n4. Configure the GitLab agent to manage these resources.  \n5. Verify resource management and deployment. \n6. 
Remove the old cluster integration.\n\nYou can read more about [GitLab-managed Kubernetes resources](https://docs.gitlab.com/ee/user/clusters/agent/getting\\_started) in the documentation.\n\n## Manage cloud provider clusters created through GitLab\n\nIf you created Kubernetes clusters through the GitLab integration with Google Kubernetes Engine (GKE) or Amazon Elastic Kubernetes Service (EKS), these clusters were provisioned in your respective cloud provider accounts. After the certificate-based integration is removed:\n1. Your clusters will remain fully operational in Google Cloud or AWS.  \n2. You will need to manage these clusters directly through your cloud provider's console:  \n   - GKE clusters through Google Cloud Console  \n   - EKS clusters through AWS Management Console\n\nTo view cluster information within GitLab:\n 1. Install the GitLab agent for Kubernetes. \n 1. Configure the Kubernetes dashboard integration.  \n 1. Check the dashboard for cluster details and resource information.\n\nThis change only affects how you interact with the clusters through GitLab – it does not impact the clusters' operation or availability in your cloud provider accounts.\n\nYou should still migrate your deployment setups as described above.\n\n## What should I do next?\n\nTo minimize the impact to you and your infrastructure, you should follow these steps:\n1. Check if you are impacted as soon as possible.  \n2. Plan your migration timeline before May 2026.  \n3. Start with non-production environments to gain experience.  \n4. Document your current setup and desired state.  \n5. Test the agent-based approach in a staging environment.  \n6. Gradually migrate production workloads.  \n7. Monitor and validate the new setup.\n\nThe migration to the GitLab agent for Kubernetes represents a significant improvement in how GitLab interacts with Kubernetes clusters. 
While the migration requires careful planning and execution, the benefits in terms of security, reliability, and functionality make it a worthwhile investment for your DevSecOps infrastructure.",[109,1198,717,9],"kubernetes",{"slug":1200,"featured":6,"template":698},"certificate-based-kubernetes-integration-sunsetting-on-gitlab-com","content:en-us:blog:certificate-based-kubernetes-integration-sunsetting-on-gitlab-com.yml","Certificate Based Kubernetes Integration Sunsetting On Gitlab Com","en-us/blog/certificate-based-kubernetes-integration-sunsetting-on-gitlab-com.yml","en-us/blog/certificate-based-kubernetes-integration-sunsetting-on-gitlab-com",{"_path":1206,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1207,"content":1212,"config":1218,"_id":1220,"_type":14,"title":1221,"_source":16,"_file":1222,"_stem":1223,"_extension":19},"/en-us/blog/changes-coming-to-url-structure-follow-deprecations-redirects",{"title":1208,"description":1209,"ogTitle":1208,"ogDescription":1209,"noIndex":6,"ogImage":772,"ogUrl":1210,"ogSiteName":685,"ogType":686,"canonicalUrls":1210,"schema":1211},"Bookmark these changes: URL structure updates coming in GitLab 17.0","An overview of project and user settings URL changes, including deprecations and redirects, that will happen in 17.0.","https://about.gitlab.com/blog/changes-coming-to-url-structure-follow-deprecations-redirects","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Bookmark these changes: URL structure updates coming in GitLab 17.0\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Christen Dybenko\"}],\n        \"datePublished\": \"2023-08-30\",\n      }",{"title":1208,"description":1209,"authors":1213,"heroImage":772,"date":1215,"body":1216,"category":1133,"tags":1217},[1214],"Christen Dybenko","2023-08-30","\nOver the next few releases GitLab will be making some changes to our URL structure. 
They mainly affect settings pages, but we're cleaning up a few other URLs as well. The URL structure represents the site map, and it should be both predictable and consistent.\nOver the years, page titles have begun to deviate from their original designation and these changes aim to rectify that.\nYou can see the full effect of these changes in the tables below.\n\nWe will be adding these as 301 redirects over the next few months, with a plan to remove the old routes entirely in our 17.0 release in May 2024. If you have any of these pages bookmarked, or rely on these URLs, they will continue to work up until the removal in 17.0.\n\nPlease share your feedback on this change in the [feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/420675).\n\n## Page URL updates\nHere are the page URL updates for projects and user settings.\n\n### Project\n\n| Sidebar | Current Path | New Path |\n|---------|------|-------------|\n| Analyze / **Contributor statistics** | /-/\u003Cspan style=\"background: #fdd4cd;\">graphs\u003C/span>/{default branch name} | /-/\u003Cspan style=\"background: #c3e6cd;\">contributor_statistics\u003C/span>/{default branch name} |\n| Code / **Repository graph** | /-/\u003Cspan style=\"background: #fdd4cd;\">network\u003C/span>/{default branch name} | /-/\u003Cspan style=\"background: #c3e6cd;\">repository_graph\u003C/span>/{default branch name} |\n| Code / **Locked files** | /\u003Cspan style=\"background: #fdd4cd;\">path_locks\u003C/span> | \u003Cspan style=\"background: #c3e6cd;\">/-/locked_files\u003C/span> |\n| Monitor / **Alerts** | /-/\u003Cspan style=\"background: #fdd4cd;\">alert_management\u003C/span> | /-/\u003Cspan style=\"background: #c3e6cd;\">alerts\u003C/span> | \n| Settings / **Webhooks** | /-/\u003Cspan style=\"background: #fdd4cd;\">hooks\u003C/span> | /-/\u003Cspan style=\"background: #c3e6cd;\">settings/webhooks\u003C/span> | \n| Settings / **Monitor** | /-/settings/\u003Cspan style=\"background: 
#fdd4cd;\">operations\u003C/span> | /-/settings/\u003Cspan style=\"background: #c3e6cd;\">monitor\u003C/span> | \n\n### User settings\n\n| Sidebar | Current Path | New Path |\n|---------|------|---------|\n| User settings \u003Cbr>↳ Profile | /-/profile | /-/\u003Cspan style=\"background: #c3e6cd;\">user_settings\u003C/span>/profile | \n| User settings \u003Cbr>↳ Account | /-/\u003Cspan style=\"background: #fdd4cd;\">profile\u003C/span>/account | /-/\u003Cspan style=\"background: #c3e6cd;\">user_settings\u003C/span>/account |\n| User settings \u003Cbr>↳ Applications | /-/\u003Cspan style=\"background: #fdd4cd;\">profile\u003C/span>/applications | /-/\u003Cspan style=\"background: #c3e6cd;\">user_settings\u003C/span>/applications | \n| User settings \u003Cbr>↳ Chat | /-/\u003Cspan style=\"background: #fdd4cd;\">profile\u003C/span>/chat\u003Cspan style=\"background: #fdd4cd;\">\\_names\u003C/span> | /-/\u003Cspan style=\"background: #c3e6cd;\">user_settings\u003C/span>/chat | \n| User settings \u003Cbr>↳ Personal access tokens | /-/\u003Cspan style=\"background: #fdd4cd;\">profile\u003C/span>/personal_access_tokens | /-/\u003Cspan style=\"background: #c3e6cd;\">user_settings\u003C/span>/personal_access_tokens | \n| User settings \u003Cbr>↳ Emails | /-/\u003Cspan style=\"background: #fdd4cd;\">profile\u003C/span>/emails | /-/\u003Cspan style=\"background: #c3e6cd;\">user_settings\u003C/span>/emails | \n| User settings \u003Cbr>↳ Password | /-/\u003Cspan style=\"background: #fdd4cd;\">profile\u003C/span>/password/edit | /-/\u003Cspan style=\"background: #c3e6cd;\">user_settings\u003C/span>/password/edit | \n| User settings \u003Cbr>↳ Notifications | /-/\u003Cspan style=\"background: #fdd4cd;\">profile\u003C/span>/notifications | /-/\u003Cspan style=\"background: #c3e6cd;\">user_settings\u003C/span>/notifications | \n| User settings \u003Cbr>↳ SSH keys | /-/\u003Cspan style=\"background: #fdd4cd;\">profile\u003C/span>/keys | /-/\u003Cspan style=\"background: 
#c3e6cd;\">user_settings\u003C/span>/\u003Cspan style=\"background: #c3e6cd;\">ssh\u003C/span>\\_keys | \n| User settings \u003Cbr>↳ GPG keys | /-/\u003Cspan style=\"background: #fdd4cd;\">profile\u003C/span>/gpg_keys | /-/\u003Cspan style=\"background: #c3e6cd;\">user_settings\u003C/span>/gpg_keys | \n| User settings \u003Cbr>↳ Preferences | /-/\u003Cspan style=\"background: #fdd4cd;\">profile\u003C/span>/preferences | /-/\u003Cspan style=\"background: #c3e6cd;\">user_settings\u003C/span>/preferences | \n| User settings \u003Cbr>↳ Active sessions | /-/\u003Cspan style=\"background: #fdd4cd;\">profile\u003C/span>/active_sessions | /-/\u003Cspan style=\"background: #c3e6cd;\">user_settings\u003C/span>/active_sessions | \n| User settings \u003Cbr>↳ Authentication log | /-/\u003Cspan style=\"background: #fdd4cd;\">profile\u003C/span>/\u003Cspan style=\"background: #fdd4cd;\">audit_log\u003C/span> | /-/\u003Cspan style=\"background: #c3e6cd;\">user_settings\u003C/span>/\u003Cspan style=\"background: #c3e6cd;\">authentication_log\u003C/span> | \n| User settings \u003Cbr>↳ Usage quotas | /-/\u003Cspan style=\"background: #fdd4cd;\">profile\u003C/span>/usage_quotas | /-/\u003Cspan style=\"background: #c3e6cd;\">user_settings\u003C/span>/usage_quotas |\n",[1133,695,781,9],{"slug":1219,"featured":6,"template":698},"changes-coming-to-url-structure-follow-deprecations-redirects","content:en-us:blog:changes-coming-to-url-structure-follow-deprecations-redirects.yml","Changes Coming To Url Structure Follow Deprecations 
Redirects","en-us/blog/changes-coming-to-url-structure-follow-deprecations-redirects.yml","en-us/blog/changes-coming-to-url-structure-follow-deprecations-redirects",{"_path":1225,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1226,"content":1232,"config":1238,"_id":1240,"_type":14,"title":1241,"_source":16,"_file":1242,"_stem":1243,"_extension":19},"/en-us/blog/chat-about-your-merge-request-with-gitlab-duo",{"title":1227,"description":1228,"ogTitle":1227,"ogDescription":1228,"noIndex":6,"ogImage":1229,"ogUrl":1230,"ogSiteName":685,"ogType":686,"canonicalUrls":1230,"schema":1231},"Chat about your merge request with GitLab Duo","Learn how to use AI-powered Chat to quickly understand complex merge requests by asking about implementation choices, potential risks, and architectural decisions.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675536/Blog/Hero%20Images/blog-image-template-1800x945__2_.png","https://about.gitlab.com/blog/chat-about-your-merge-request-with-gitlab-duo","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Chat about your merge request with GitLab Duo\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Torsten Linz\"}],\n        \"datePublished\": \"2024-11-22\",\n      }",{"title":1227,"description":1228,"authors":1233,"heroImage":1229,"date":1235,"body":1236,"category":801,"tags":1237},[1234],"Torsten Linz","2024-11-22","Managing a merge request (MR) is an integral part of collaborative development, involving navigating through code changes, discussions, and dependencies to ensure high-quality outcomes. Whether you’re reviewing someone else’s code or trying to make your own changes clearer, the new [GitLab Duo Chat](https://about.gitlab.com/gitlab-duo/) capability, available in GitLab Duo Enterprise, can help simplify your workflow. 
Now, you can have a conversation with GitLab Duo Chat about an MR, directly inside GitLab.\n\n## What GitLab Duo Chat brings to an MR workflow\n\nImagine jumping into a merge request titled \"Add logging to order processing.\" Your goal is to onboard yourself to the MR as quickly as possible and to review it. You can use GitLab Duo Chat to onboard yourself faster and understand critical questions to accelerate your review:\n\n* \"Do the logs cover all failure scenarios, or are there any gaps where an issue might not be traceable?\"  \n* “Are there any potential privacy concerns with the logged data?\"  \n* \"Why was logging added at these specific points in the order processing workflow, and how does it help with debugging or monitoring?\"\n\n![MR context example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675670/Blog/Content%20Images/MR_Context_example.png)\n\nThese are the kinds of questions that GitLab Duo Chat is ready to answer – questions that let you quickly understand the intentions behind the changes and uncover any potential risks before diving into the details. Instead of spending a lot of time trying to follow code paths or waiting on the author to reply to your questions, you can start getting answers right away, saving valuable time.\n\n## In-depth conversations about MRs\n\nThe magic of this new chat capability isn’t just in summarizing code – it’s in its ability to support in-depth conversations about the MR at hand. Let's assume the logging MR also includes notifications and refactoring. You can ask specific, insightful questions, such as:\n\n* “What are the potential network failure points introduced by refactoring the payment service into a microservice?”  \n* \"Were there any trade-offs made in terms of consistency or accuracy for better performance?\"  \n* \"How are failures in sending notifications handled? 
Are retries implemented?\"\n\nInstead of simply telling you what changes have been made, GitLab Duo Chat helps you understand *why* those changes were made, what risks are involved, and how to mitigate them. It lets you dig deep and explore the context behind every line of code, every architectural decision, and every change in behavior within the specific MR you are working on.\n\nAnd it doesn't end with that one answer. You can engage in a follow-up conversation to dig deeper or to explore. \n\n## An evolving conversation tool\n\nWe’re really excited about how GitLab Duo Chat is evolving to become a true conversational partner for MR authors and reviewers alike. GitLab Duo Chat is [aware of the MR description, discussions, the code diff, and metadata of a single MR](https://docs.gitlab.com/ee/user/gitlab_duo_chat/index.html#the-context-chat-is-aware-of). It’s like having an assistant who is well-versed in your MR and ready to explain any part of it – or even rewrite parts, if that’s what you need.\n\nWith GitLab Duo Chat, onboarding yourself to a complex MR or understanding a change in-depth is faster and more intuitive than ever before.\n\n## We need your feedback\n\nWe’re eager to hear how GitLab Duo Chat works for you. All feedback helps us refine this feature and make it even more useful. Please share your experiences by commenting on our [issue tracker](https://gitlab.com/gitlab-org/gitlab/-/issues/464587). Please include the questions you asked, the response you got, and whether it helped you move forward. Together, we can make GitLab Duo Chat an indispensable tool for every merge request!\n\nFor a deeper dive into how to use GitLab Duo Chat, check out our [documentation](https://docs.gitlab.com/ee/user/gitlab_duo_chat/examples#ask-about-a-specific-merge-request) or watch our introductory video below. 
Start your first conversation today and let us know what you think!\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/4muvSFuWWL4?si=7W4mHWw2iUOzoTUz\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->  \n\n> Sample this new capability with [a free trial of GitLab Ultimate and GitLab Duo Enterprise](https://gitlab.com/-/trials/new).\n\n## Learn more about GitLab Duo Chat\n\n- [GitLab Duo Chat: Get to know productivity-boosting AI enhancements](https://about.gitlab.com/blog/gitlab-duo-chat-get-to-know-productivity-boosting-ai-enhancements/)\n- [GitLab Duo Chat, your at-the-ready AI assistant, is now generally available](https://about.gitlab.com/blog/gitlab-duo-chat-now-generally-available/)\n- [GitLab Duo Chat 101: Get more done on GitLab with our AI assistant](https://about.gitlab.com/blog/gitlab-duo-chat-101-get-more-done-on-gitlab-with-our-ai-assistant/)",[803,9,695,719,717,888],{"slug":1239,"featured":6,"template":698},"chat-about-your-merge-request-with-gitlab-duo","content:en-us:blog:chat-about-your-merge-request-with-gitlab-duo.yml","Chat About Your Merge Request With Gitlab Duo","en-us/blog/chat-about-your-merge-request-with-gitlab-duo.yml","en-us/blog/chat-about-your-merge-request-with-gitlab-duo",{"_path":1245,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1246,"content":1252,"config":1258,"_id":1260,"_type":14,"title":1261,"_source":16,"_file":1262,"_stem":1263,"_extension":19},"/en-us/blog/ci-cd-catalog-goes-ga-no-more-building-pipelines-from-scratch",{"title":1247,"description":1248,"ogTitle":1247,"ogDescription":1248,"noIndex":6,"ogImage":1249,"ogUrl":1250,"ogSiteName":685,"ogType":686,"canonicalUrls":1250,"schema":1251},"CI/CD Catalog goes GA: No more building pipelines from scratch","The CI/CD Catalog becomes generally available in GitLab 17.0. 
Get to know the capabilities for discovering and sharing pipeline building blocks to help standardize and scale pipelines.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098794/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%289%29_DoeBNJVrhv9FpF3WCsHNc_1750098793762.png","https://about.gitlab.com/blog/ci-cd-catalog-goes-ga-no-more-building-pipelines-from-scratch","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"CI/CD Catalog goes GA: No more building pipelines from scratch\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Dov Hershkovitch\"}],\n        \"datePublished\": \"2024-05-08\",\n      }",{"title":1247,"description":1248,"authors":1253,"heroImage":1249,"date":1255,"body":1256,"category":717,"tags":1257},[1254],"Dov Hershkovitch","2024-05-08","GitLab's [CI/CD Catalog](https://docs.gitlab.com/ee/ci/components/#cicd-catalog) becomes generally available in 17.0 (May 16, 2024), enabling all GitLab users to discover, reuse, and contribute CI/CD components easily. The CI/CD Catalog boosts collaboration and efficiency when creating pipeline configurations by allowing access to a treasure trove of pre-built components, ready to seamlessly integrate into DevSecOps workflows. Enterprises can use the CI/CD Catalog's centralized platform to standardize workflows across the whole organization.\n\nWith the CI/CD Catalog, GitLab is introducing several key capabilities that are also generally available.\n\n> Discover the future of AI-driven software development with our GitLab 17 virtual launch event. [Watch today!](https://about.gitlab.com/seventeen/)\n\n## Components and inputs\nThe [CI/CD Catalog](https://about.gitlab.com/blog/introducing-the-gitlab-ci-cd-catalog-beta/) draws its strength from two fundamental features: components and inputs. 
These capabilities form the backbone of the catalog, enabling developers and DevSecOps teams to streamline their pipeline development. Let’s dive into each of these features:\n\n### Components\n\n#### What are components?\nComponents are reusable, single-purpose building blocks that abstract away the complexity of pipeline configuration. Think of them as Lego pieces for your CI/CD workflows. By using components, you can assemble pipelines more efficiently without starting from scratch each time.\n\n#### Types of components\n- Template-type components: These components resemble CI templates and come with predefined input definitions. They are organized within a specific directory structure, which you can easily plug into your pipelines.\n- CI Steps (upcoming): This new type of component, which is available as an [experimental feature](https://docs.gitlab.com/ee/ci/steps/), will become a first-class object in the CI/CD Catalog, so stay tuned for this exciting addition.\n\n### Inputs\n\n#### What is Inputs Interpolation?\n\nInputs Interpolation is a powerful feature that allows you to define input parameters for includable configuration files. By using the [spec: inputs keyword](https://docs.gitlab.com/ee/ci/yaml/#specinputs) within your component configuration, you can dynamically replace almost any keywords within components with parameters. This flexibility extends to adjusting stages, scripts, or job names, supporting various data types making the component fully flexible to your needs.\n\n##### Scoped and effective\nImportantly, inputs are scoped exclusively to the included configuration. This prevents unintended effects on the rest of your pipeline. 
With Inputs Interpolation, you can declare and enforce constraints seamlessly, ensuring smooth integration of components.\n\nWhether you’re a seasoned DevOps pro or just starting out, the CI/CD Catalog, components, and Inputs Interpolation will transform your pipeline development experience.\n\n## How to access CI/CD Catalog components\nThe CI/CD Catalog is a powerful resource for developers and DevOps teams. It allows you to share and discover pre-built components, streamlining your pipeline development. Here’s how it works:\n\n1. Components are standalone building blocks that simplify pipeline configuration. You can create custom components tailored to your needs. But how do you make them available to others? That’s where the CI/CD Catalog comes in.\n\n2. How to publish to the CI/CD Catalog\n    - To share your components with the community, follow these steps:\n      - Use a simple CI job to publish your component and make it discoverable in the CI/CD Catalog.\n      - Whether it’s a reusable script, a deployment template, or any other pipeline element, the CI/CD Catalog is the perfect place to contribute.\nComponents released to the CI/CD Catalog should be tagged with a [semantic version](https://docs.gitlab.com/ee/ci/components/#semantic-versioning) using three digits.\n    - By sharing your components, you contribute to a growing library of resources that benefit the entire community.\n3. Catalog index page\n    - The main page of the CI/CD Catalog (also known as the index page) provides an overview of available projects with published components. 
Anyone can access the catalog and search for a component that suits their needs.\n    - The index page features two tabs:\n      - All: Displays all component projects that have been published and visible to you.\n      - Your groups: Shows components published within a namespace you’re part of.\n\n![CI/CD Catalog](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098805/Blog/Content%20Images/Blog/Content%20Images/catalog_index_aHR0cHM6_1750098804807.png)\n\n4.  Catalog details page\n\n- Upon clicking on one of the projects in the CI/CD Catalog, you will be redirected to the details page where you can view the available components in that project. \n    - Note that there could be multiple components in a single project.\n\n- The details page features two tabs:\n\u003Ccenter>\u003Ci>Readme: Displays the readme.md of the project that was previously configured by the user.\u003C/i>\u003C/center>\n\n![readme tab](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098805/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098804808.png)\n\n\u003Ccenter>\u003Ci>Components: Displays the detailed information for each component such as inputs table syntax to use and more. This information is generated and displayed automatically to help keep it up to date.\u003C/i>\u003C/center>\n\n![components tab](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098805/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750098804809.png)\n\n## Using a component\n\nTo use a component from the CI/CD Catalog, simply copy the suggested snippet to your pipeline configuration. For example: \n\n```yaml\n\ninclude: \n  - component:   gitlab.com/google-gitlab-components/cloud-run/deploy-cloud-run@0.1.0\n\n```\n\nNote that the snippet contains the fully qualified domain name of the component, so if you moved or clone the component to a different location, you should make sure the FQDN is accurate. 
You can use the $CI_SERVER_FQDN variable instead of hardcoding the FQDN in your pipeline configuration.\n\nA component can be referenced using the following:\n\n- a commit SHA, for example, e3262fdd0914fa823210cdb79a8c421e2cef79d. We highly recommend using this with $CI_COMMIT_SHA variable in your `.gitlab-ci.yml` file to test a component before publishing it to the CI/CD Catalog.\n- a branch name, for example, main\n- a tag, for example 1.0.0\n- shorthand abbreviation 1.0, which will provide you the latest patched 1.0.x version or 1, which will provide you the latest 1.x.x minor version. This is why it is recommended to use the best practices of semantic versioning and always reference a specific version (minor, major, or a specific patch).\n- ~latest, which always points to the latest semantic version published in the CI/CD Catalog. Use ~latest only if you want to use the absolute latest version at all times, which could include breaking changes, so please use it with caution.\n\n## Understanding the CI/CD Catalog across GitLab deployments\nThe CI/CD Catalog and components offer different flavors to cater to various needs and use cases.\n\n### Private and public components\n\n#### Public components\n\n- Public components are hosted in public repositories and are accessible to everyone.\n- When a public component is published from GitLab.com to the main catalog, it becomes discoverable and available for consumption by all users.\n- We encourage users to contribute their best components to the public catalog, helping us build a thriving community.\n\n#### Private components\n\n- Private components are hosted in private repositories.\n- Visibility based on permissions: Users who access the catalog can also see and search for private components if they have permission to view the repository where the component is hosted.\n    - Private catalog option: In GitLab.com, organizations can publish private components to the main catalog in GitLab.com, thereby creating a 
“private catalog” with content accessible only to authorized users. \n\n### GitLab.com vs. Self-managed\n- The “public” catalog in GitLab.com: The main catalog is the one that is hosted on GitLab.com and can be accessible to anyone by going to [gitlab.com/explore/catalog](http://gitlab.com/explore/catalog). The CI/CD Catalog is:\n    - Open access: The catalog hosted on GitLab.com is available for anyone to view.\n    - Contribute and grow: By sharing components, users around the world contribute to a growing library of resources that benefits the entire community.\n\n- Self-managed customers: The CI/CD Catalog is also available for self-managed customers however it has several differences: \n    - Empty catalog: For self-managed customers, the catalog initially appears empty since it doesn't contain any available components.\n    - Organizational catalog: Each organization is responsible for its own catalog, where it can create and maintain its own library of components within this flavor.\n    - Using a component from GitLab.com: If you want to use a component from the main catalog in GitLab.com, clone the project locally and publish it to your organizational catalog. Keep in mind that upstream updates will require mirroring to receive the latest changes. You can learn more about how to do that in our [CI/CD Components documentation](https://docs.gitlab.com/ee/ci/components/#use-a-gitlabcom-component-in-a-self-managed-instance).\n\n## What’s next?\n\nThe CI/CD Catalog is only the first step in revolutionizing the way you build and display your available pipelines. Here is a glimpse of what we plan to offer to our users in the upcoming milestones.\n\n### CI Steps\n\nSteps are reusable and composable pieces of a job that can be referenced in your pipeline configuration. Each step defines structured inputs and outputs that can be consumed by other steps. 
Steps can come from local files, GitLab.com repositories, or any other Git source.\n\nIn GitLab, we think of steps as another type of component. We are going to make sure CI Steps will become a first-class object in the CI/CD Catalog, where users can publish, unpublish, search, and consume steps in the same way as they are using components today.\n\n### Securing your catalog workflows\n\nWe aim to empower central administrators to manage component creation, usage, and publication within their organizational catalog. We are committed to ensuring the publishing process seamlessly integrates with the organization's standards and existing workflow. We want to enable the platform administrators with the capabilities to secure and govern the CI/CD Catalog and component workflows. More information can be found in [this epic](https://gitlab.com/groups/gitlab-org/-/epics/12713).\n\n### Analytics\n\nOur goal is to empower users with seamless control over component management across pipelines, ensuring optimal version control and project alignment. This addresses the challenge of users currently lacking visibility into component usage across various project pipelines. Our objective is to provide users with the capability to swiftly identify outdated versions and take prompt corrective actions as needed. This enhancement will foster an environment where users can efficiently manage and update components, promoting both version control precision and project alignment. Read more in [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/393326).\n\n## Get started with the CI/CD Catalog\n\nThe introduction of the CI/CD Catalog revolutionizes pipeline development by offering a vast array of pre-built components. Users don't have to start building pipelines from scratch because the CI/CD Catalog provides an access point to search components and pipeline configurations. 
The CI/CD Catalog's availability makes accessing and sharing components effortless, fostering collaboration and community growth. Whether utilizing public or private repositories, users can leverage these resources to enhance their pipeline development experience. Moreover, while GitLab.com users benefit from an open-access catalog, self-managed customers can establish organizational catalogs tailored to their needs.\n\n> [Get to know the CI/CD Catalog](https://about.gitlab.com/free-trial/devsecops/) with a free trial of GitLab Ultimate.\n\n> Learn more about the CI/CD Catalog and components:\n> \n> - [A CI/CD component builder's journey](https://about.gitlab.com/blog/a-ci-component-builders-journey/)\n>\n> - [FAQ: GitLab CI/CD Catalog](https://about.gitlab.com/blog/faq-gitlab-ci-cd-catalog/)\n>\n> - [Documentation: CI/CD components and CI/CD Catalog](https://docs.gitlab.com/ee/ci/components/)\n> \n> - [Introducing CI/CD components and how to use them in GitLab](https://about.gitlab.com/blog/introducing-ci-components/)\n> \n",[109,951,9,695],{"slug":1259,"featured":91,"template":698},"ci-cd-catalog-goes-ga-no-more-building-pipelines-from-scratch","content:en-us:blog:ci-cd-catalog-goes-ga-no-more-building-pipelines-from-scratch.yml","Ci Cd Catalog Goes Ga No More Building Pipelines From Scratch","en-us/blog/ci-cd-catalog-goes-ga-no-more-building-pipelines-from-scratch.yml","en-us/blog/ci-cd-catalog-goes-ga-no-more-building-pipelines-from-scratch",{"_path":1265,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1266,"content":1272,"config":1279,"_id":1281,"_type":14,"title":1282,"_source":16,"_file":1283,"_stem":1284,"_extension":19},"/en-us/blog/combine-gitlab-webhooks-and-twilio-for-sms-alerts-on-devsecops-platform",{"title":1267,"description":1268,"ogTitle":1267,"ogDescription":1268,"noIndex":6,"ogImage":1269,"ogUrl":1270,"ogSiteName":685,"ogType":686,"canonicalUrls":1270,"schema":1271},"Combine GitLab webhooks and Twilio for SMS alerts on DevSecOps 
platform","Configure GitLab webhooks with SMS alerts to instantly get feedback on new and existing issues within a project and enable teams to react quickly to project- and group-level changes.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099013/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%2814%29_6VTUA8mUhOZNDaRVNPeKwl_1750099012960.png","https://about.gitlab.com/blog/combine-gitlab-webhooks-and-twilio-for-sms-alerts-on-devsecops-platform","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Combine GitLab webhooks and Twilio for SMS alerts on DevSecOps platform\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Ted Gieschen\"}],\n        \"datePublished\": \"2024-06-10\",\n      }",{"title":1267,"description":1268,"authors":1273,"heroImage":1269,"date":1275,"body":1276,"category":1277,"tags":1278},[1274],"Ted Gieschen","2024-06-10","We all strive to create the most robust and secure DevSecOps environments\nwhere everyone can collaborate to deliver amazing products for our\ncustomers. But no matter how robust and secure we design our environments we\ncannot exclude the possibility that something might go wrong. When an issue\ndoes occur we want to make sure we can remediate it quickly. To do that it's\nnot only important to document the details of the issue but also get the\nright people notified immediately. In this article, we will set up GitLab\n[webhooks](https://docs.gitlab.com/ee/user/project/integrations/webhooks.html)\ntogether with [Twilio's functionality](https://www.twilio.com/en-us) to\n[send SMS alerts](https://www.twilio.com/docs/messaging) to the right\npeople, getting them up to date so they can mitigate problems quickly.\n\n\n## Prerequisites\n\n\n1. 
A GitLab account: Webhooks aren't restricted by tier, which means this\nfeature can be used with a [Free, Premium or Ultimate\nlicense](https://about.gitlab.com/pricing/) for either [GitLab's SaaS or\nself-managed\noffering](https://docs.gitlab.com/ee/subscriptions/choosing_subscription.html).\nIf you don't have an account yet, you can create one on [our sign-up page](\nhttps://gitlab.com/users/sign_up).\n\n\n2. A Twilio account: To handle the incoming webhook and send an SMS, you\nwill need a Twilio account. If you don't already have one, you can create\none on [Twilio's sign-up page](https://www.twilio.com/try-twilio).\n\n\n3. (Optional) An SMS-capable phone to test the functionality: We will be\ntesting the functionality at the end of this article. If you want to follow\nalong, you will need access to a phone that can receive SMS texts.\n\n\n4. (Optional) A basic understanding of Node.js: We will be handling the\nwebhooks using a serverless function provided by Twilio Functions. This will\nbe written in [Node.js](https://nodejs.org/en/about). Although you can\nsimply copy-paste the functionality, it would be beneficial to understand\nthe basics of Node.js so you can expand functionality in the future.\n\n\n## Building automated SMS notifications\n\n\nNow, let's get hands-on with building real-time SMS notifications.\n\n\nAt a high level, the workflow looks as follows:\n\n\n![SMS\nworkflow](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099023/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750099023261.png)\n\n\n1. An event is triggered within GitLab. This event is then picked up by\nGitLab's webhook functionality.\n\n2. The information of the event is then sent as a webhook to a [Twilio\nFunction](https://www.twilio.com/docs/serverless/functions-assets/functions).\n\n3. Twilio Functions processes the event data sent by GitLab and creates the\nSMS body with relevant information.\n\n4. 
When complete, Twilio Functions triggers [Twilio Programmable\nMessaging](https://www.twilio.com/docs/messaging) with the SMS body and\nrecipient information.\n\n5. Twilio Programmable Messaging then sends the SMS with the generated body\nto the recipient.\n\n\n### Set up Twilio SMS\n\n\nWe need to set up our Twilio environment to be able to send SMS. To do this,\nlog in to your Twilio account. If you don't have one just follow the link\nprovided in the prerequisites section above.\n\n\nOnce logged in you will see the Twilio Console, which will look something\nlike this:\n\n\n![Twilio\nconsole](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099023/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750099023261.png)\n\n\nFrom here, we will head to the left sidebar menu and select __United States\n(US1) > Phone Numbers > Manage > Active numbers__ and then click the \"Buy a\nnumber\" button.\n\n\n![Buy a number\nscreen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099023/Blog/Content%20Images/Blog/Content%20Images/image9_aHR0cHM6_1750099023263.png)\n\n\nYou can select a phone number, which will be the number that notifications\nare sent from. There are some\n[guidelines](https://www.twilio.com/docs/messaging/guides/sending-international-sms-guide)\nspecific to which countries you can send SMS based on the Twilio phone\nnumber you purchase, so please keep that in mind. In this example, I will be\nusing my personal U.S. phone number for this article as the recipient phone\nnumber, so, in this case, I will purchase a U.S. Twilio number. Just make\nsure your phone number has the SMS capability. 
Once selected, simply click\nthe \"Buy \u003Cphone number>\"  button.\n\n\n![twilio webhooks - image\n4](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099023/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750099023265.png)\n\n\nNext, we just need to make sure Twilio can send SMS to our recipient phone\nnumber by allowing Twilio Programmable Messaging to send SMS to the country\nour recipient phone number is associated with. To do so, head to __United\nStates (US1) > Messaging > Settings > Geo permissions__ and make sure that\nthe country associated with the recipient's phone number is selected (for\nexample, as I am using my U.S. phone number as the recipient phone number in\nthis blog, I will select United States).\n\n\n![twilio webhooks - image\n5](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099023/Blog/Content%20Images/Blog/Content%20Images/image11_aHR0cHM6_1750099023267.png)\n\n\nClick \"Save geo permissions.\" With that we're all set up to send SMS.\n\n\nNext, let's handle the processing of the webhook and the creation of our SMS\nalerts with Twilio Functions.\n\n\n### Set up Twilio Functions\n\n\nTo process the webhook we will be sending to Twilio, we need to define a\nTwilio Function. To do this, select **United States (US1) > Functions and\nAssets > Functions (Classic) > List** and click \"Create a Function.\" Select\nthe \"Hello SMS\" option in the pop-up and click \"Create.\"\n\n\n![Create a Twilio\nfunction](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099023/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750099023269.png)\n\n\nNow, let's go ahead and configure our Twilio Function.\n\n\n1. Extend the path for example `/handle-event-webhook`. In my case this\nwould result in the following path:\n`https://daff-mac-7354.twil.io/handle-event-webhook`.\n\n\n2. Disable the option `Check for valid Twilio signature`.\n\n\n3. 
Adjust the code to the following, making sure to update the values for\n`\u003Cyour personal phone number>` and `\u003Cyour Twilio Phone number>`:\n\n\n``` javascript\n\nexports.handler = function (context, event, callback) {\n  const twilioClient = context.getTwilioClient();\n\n  twilioClient.messages\n    .create({\n      body: `Hi there! There was an update to issue (${event[\"object_attributes\"][\"id\"]}) with title \"${event[\"object_attributes\"][\"title\"]}\" in project ${event[\"repository\"][\"name\"]}. It was just ${event[\"object_attributes\"][\"action\"]}.`,\n      to: \"\u003Cyour personal phone number>\",\n      from: \"\u003Cyour Twilio Phone number>\",\n    })\n    .then((message) => {\n      console.log(\"SMS successfully sent\");\n      console.log(message.sid);\n      return callback(null, `Success! Message SID: ${message.sid}`);\n    })\n    .catch((error) => {\n      console.error(error);\n      return callback(error);\n    });\n};\n\n\n```\n\n\nIt should end up looking like the following:\n\n  ![Configuration for Twilio function](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099023/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750099023271.jpg)\n\nNow, whenever our endpoint is hit, it should trigger an SMS with a custom\nmessage indicating a change to an existing issue which will represent an\nexample of the various [webhook\nevents](https://docs.gitlab.com/ee/user/project/integrations/webhook_events.html)\nwe can configure.\n\n\nNext, let's set our webhooks within GitLab to trigger this endpoint whenever\na change to an issue is made.\n\n\n### Set up GitLab webhooks\n\n\nLog in to your GitLab instance and go to the project you would like to\nconfigure event webhooks in.\n\n\nOnce in the Project, go to **Settings > Webhooks** and click on \"Add new\nwebhook.\"\n\n\n![Screen to add a 
new\nwebhook](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099023/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750099023273.png)\n\n\nYou will only need to configure the following fields:\n\n\n1. URL: This should be the endpoint we defined in the previous section. In\nthe previous example that would be\n`https://daff-mac-7354.twil.io/handle-event-webhook`.\n\n\n2. Trigger: In our case, we will be reacting to [issues\nevents](https://docs.gitlab.com/ee/user/project/integrations/webhook_events.html#issue-events),\nso check \"Issues events.\"\n\n\n![Configuring URL and trigger\nfields](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099023/Blog/Content%20Images/Blog/Content%20Images/image7_aHR0cHM6_1750099023274.png)\n\n\nWe're all set to test our setup!\n\n\n### Testing\n\n\nWhile in the project that was just configured to react to issues events,\nhead to \"Plan > Issues\" and click on \"New issue.\"\n\n\n![New issue\nscreen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099023/Blog/Content%20Images/Blog/Content%20Images/image8_aHR0cHM6_1750099023276.png)\n\n\nAdd a title and click on \"Create Issue.\"\n\n  ![Create issue screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099023/Blog/Content%20Images/Blog/Content%20Images/image10_aHR0cHM6_1750099023278.png)\n\nIf everything is configured correctly, you should get an SMS looking\nsomething like:\n\n\n`Sent from your Twilio trial account - Hi there! There was an update to\nissue (146735617) with title \"GitLab webhook example\" in project Webhooks\nExample. It was just opened.`\n\n\n## Expanding the use case\n\n\nWe've leveraged Twilio's SMS functionality in combination with GitLab\nwebhooks to instantly get feedback on new and existing issues within our\nproject, allowing us to react quickly to any changes that might occur. This\nsimple use case showed how one person could instantly get informed about a\nsingle type of event. 
However, often we want to inform more people about\nvarious events or be able to react to more than just one type of event (like\nissue creation and updates).\n\n\nThis functionality can be expanded by:\n\n\n1. Sending SMS alerts to multiple people: This can be achieved by extending\nthe Twilio Function to loop through a given array of phone numbers.\n[Twilio's Messaging Service](https://www.twilio.com/docs/messaging/services)\ncan be leveraged to potentially simplify the process of sending SMS to\nvarious phone numbers.\n\n\n2. Handling different event types: Select more types of webhook events in\nthe Project settings to react to other things like\n[comments](https://docs.gitlab.com/ee/user/project/integrations/webhook_events.html#comment-events),\n[deployments](https://docs.gitlab.com/ee/user/project/integrations/webhook_events.html#deployment-events),\nor\n[releases](https://docs.gitlab.com/ee/user/project/integrations/webhook_events.html#release-events).\n\n\n3. Configure on a group level: In this example, we’ve only configured\nwebhooks on a project level. However, if it is relevant to react to events\nacross projects on a group level, this can also be configured, removing the\nneed to change webhook settings for each project.\n\n\n4. Self-host message generation functionality: Leverage [Twilio Server Side\nSDKs](https://www.twilio.com/docs/libraries) instead of Twilio Functions to\nhost the code yourself. 
This could benefit you if you have restrictions on\nwhere you can host code as well as allow you to more easily connect with the\nrest of your code base like fetching information from your database to get\nphone numbers for relevant people.\n\n\n> Start [a free trial of GitLab\nUltimate](https://gitlab.com/-/trial_registrations/new?glm_source=about.gitlab.com/blog&glm_content=default-saas-trial)\ntoday to test-drive more DevSecOps features.\n","devsecops",[695,719,717,693,9],{"slug":1280,"featured":91,"template":698},"combine-gitlab-webhooks-and-twilio-for-sms-alerts-on-devsecops-platform","content:en-us:blog:combine-gitlab-webhooks-and-twilio-for-sms-alerts-on-devsecops-platform.yml","Combine Gitlab Webhooks And Twilio For Sms Alerts On Devsecops Platform","en-us/blog/combine-gitlab-webhooks-and-twilio-for-sms-alerts-on-devsecops-platform.yml","en-us/blog/combine-gitlab-webhooks-and-twilio-for-sms-alerts-on-devsecops-platform",{"_path":1286,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1287,"content":1293,"config":1299,"_id":1301,"_type":14,"title":1302,"_source":16,"_file":1303,"_stem":1304,"_extension":19},"/en-us/blog/comply-with-nist-secure-supply-chain-framework-with-gitlab",{"title":1288,"description":1289,"ogTitle":1288,"ogDescription":1289,"noIndex":6,"ogImage":1290,"ogUrl":1291,"ogSiteName":685,"ogType":686,"canonicalUrls":1291,"schema":1292},"Comply with NIST's secure software supply chain framework with GitLab","The U.S. government's Secure Software Development Framework has four key practices. 
GitLab's DevOps platform has features to address them all.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749667094/Blog/Hero%20Images/container-security.jpg","https://about.gitlab.com/blog/comply-with-nist-secure-supply-chain-framework-with-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Comply with NIST's secure software supply chain framework with GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Sandra Gittlen\"}],\n        \"datePublished\": \"2022-03-29\",\n      }",{"title":1288,"description":1289,"authors":1294,"heroImage":1290,"date":1296,"body":1297,"category":693,"tags":1298},[1295],"Sandra Gittlen","2022-03-29","\nThe U.S. government, in March, released an update to its framework to secure agencies’ software supply chains, which are under [increasing risk of attack](https://www.whitehouse.gov/briefing-room/statements-releases/2022/03/21/statement-by-president-biden-on-our-nations-cybersecurity/). The National Institute of Standards and Technology (NIST) unveiled the Secure Software Development Framework (SSDF) 1.1, which calls for tighter controls throughout the software development lifecycle and describes a set of best practices for organizations – and their third-party suppliers – to follow.\n\nThe [SSDF](https://csrc.nist.gov/News/2022/nist-publishes-sp-800-218-ssdf-v11) focuses on how organizations can protect software supply chains, regardless of technology, platform, programming language, or operating environment, in large part by introducing security early in the DevOps process. 
There are four key practices:\n\n- prepare the organization\n\n- protect software (all components of the software should be safe from tampering and unauthorized access)\n\n- produce well-secured software (with minimal security vulnerabilities in its releases)\n\n- respond to vulnerabilities\n\n“The goal of the SSDF, in my opinion, is to bring all agencies and their suppliers to the same place in terms of secure software development,” says Joel Krooswyk, senior manager of Solutions Architecture at GitLab. “The framework gets everyone on the same page and speaking the same language, which will inevitably help them to be more effective against whatever threats may come.”\n\nWhile some agencies, such as the Department of Defense and Central Intelligence Agency, might be more sophisticated in the security and compliance of their software supply chains, other public sector organizations are less advanced, using a raft of ad-hoc legacy applications to manually handle vulnerabilities.\n\nThe SSDF undoubtedly will drive all government agencies to direct resources – human and technological – toward [automating supply chain security](/blog/gitlab-supply-chain-security/). To ensure that they meet the measure of the framework without overburdening their teams and budgets, organizations should consider deploying GitLab, a single DevOps platform that has security built in early in the development lifecycle, end-to-end, and with maximum visibility. \n\nHere’s how GitLab addresses the specific practices within the SSDF:\n\n**1. 
Prepare the organization**\n\nGitLab helps organizations ensure that their people, processes, and technology are prepared to perform security software development, in line with SSDF best practices.\n\nThe GitLab DevOps platform features:\n\n- Strong [policy management](https://docs.gitlab.com/ee/administration/compliance.html) and role-based permissions models with LDAP, single sign-on, and multifactor authentication support\n\n- [Sophisticated security dashboards](https://docs.gitlab.com/ee/user/application_security/security_dashboard/) with severity and trends to provide all stakeholders visibility and observability into the software development lifecycle \n\n- Scaled agile process support, which is enabled through epics and issues and other documentation, making for a completely auditable environment\n\n- Simplified implementation of a zero-trust security framework with the DevOps platform\n\n**2. Protect the software**\n\nThe SSDF guides organizations to protect all components of their software from tampering and unauthorized access.\n\nGitLab helps organizations accomplish this through the use of:\n\n- [source code management](https://about.gitlab.com/solutions/source-code-management/)\n\n- commit signatures\n\n- code reviews\n\n- [Hardened containers](/press/releases/2020-07-01-gitlab-announces-hardened-container-image-in-support-of-the-us-department-of-defense-enterprise-devsecops-initiative/)\n\n- role-based, read-only controls\n\n- [Merge-request approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/) \n\n- [Software Bill of Materials (SBOM)](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#cyclonedx-software-bill-of-materials) data per release\n\n- security scanning in [offline environments](https://docs.gitlab.com/ee/user/application_security/offline_deployments/)\n\n**3. 
Produce well-secured software**\n\nAccording to the SSDF, organizations should produce well-secured software with minimal security vulnerabilities in its releases.\n\nThe GitLab DevOps platform is purpose-built for this best practice and includes:\n\n- credential management\n\n- code reviews and approvals\n\n- centralized mitigation with vulnerability reports\n\n- [security scanning](https://docs.gitlab.com/ee/user/application_security/) (DAST, SAST, fuzz testing, secret detection, and more) that is integrated into the developer workflow\n\n- [continuous compliance](/solutions/compliance/) enforcement capabilities that enable organizations to tailor their pipeline reviews and security scans to all their applicable compliance mandates\n\n- the ability to find and fix vulnerabilities early on in development without building complex integrations\n\n**4. Respond to vulnerabilities**\n\nThe SSDF wants organizations to be able to identify residual vulnerabilities in their software releases and respond appropriately to address those vulnerabilities and prevent similar ones from occurring in the future.\n\nGitLab enables organizations to find and fix vulnerabilities early in the development process. 
The GitLab DevOps platform also features:\n\n- automatic updates for the Common Vulnerabilities and Exposures (CVE) database\n\n- the ability to contribute/disclose vulnerabilities directly via GitLab\n\n- [Auto DevOps](https://docs.gitlab.com/ee/topics/autodevops/) best practice scanning\n\n- status, severity, and related activity exposed on the [Vulnerability Report](https://docs.gitlab.com/ee/user/application_security/vulnerability_report/) page\n\n- integrated learning tools to learn about found vulnerabilities in real-time\n\n- on-demand scanning to look for new vulnerabilities in existing code\n\nUsing GitLab's DevOps platform, government agencies, and their suppliers, can apply the best practices set forth in the SSDF and ensure the software supply chain meets the requirements of other mandates through [continuous compliance](/solutions/compliance/).\n\n[Try GitLab Ultimate for free](/solutions/public-sector/)\n",[951,9,693,186],{"slug":1300,"featured":6,"template":698},"comply-with-nist-secure-supply-chain-framework-with-gitlab","content:en-us:blog:comply-with-nist-secure-supply-chain-framework-with-gitlab.yml","Comply With Nist Secure Supply Chain Framework With Gitlab","en-us/blog/comply-with-nist-secure-supply-chain-framework-with-gitlab.yml","en-us/blog/comply-with-nist-secure-supply-chain-framework-with-gitlab",{"_path":1306,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1307,"content":1313,"config":1320,"_id":1322,"_type":14,"title":1323,"_source":16,"_file":1324,"_stem":1325,"_extension":19},"/en-us/blog/create-a-workspace-quickly-with-the-gitlab-default-devfile",{"title":1308,"description":1309,"ogTitle":1308,"ogDescription":1309,"noIndex":6,"ogImage":1310,"ogUrl":1311,"ogSiteName":685,"ogType":686,"canonicalUrls":1311,"schema":1312},"Create a workspace quickly with the GitLab default devfile","The GitLab default devfile makes it easier than ever to try out workspaces for new projects. 
Learn how to share developer environment configurations effortlessly with this tutorial.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097860/Blog/Hero%20Images/Blog/Hero%20Images/REFERENCE%20-%20display%20preview%20for%20blog%20images%20%281%29_2XDPsbkjQ3o6tcdom6IGxI_1750097859914.png","https://about.gitlab.com/blog/create-a-workspace-quickly-with-the-gitlab-default-devfile","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Create a workspace quickly with the GitLab default devfile\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Zhaochen Li\"}],\n        \"datePublished\": \"2025-02-27\",\n      }",{"title":1308,"description":1309,"authors":1314,"heroImage":1310,"date":1316,"body":1317,"category":717,"tags":1318},[1315],"Zhaochen Li","2025-02-27","Software development environments can be complex to set up and maintain. Developers often spend a significant amount of time configuring their local environments with the right dependencies, tools, and settings. GitLab aims to solve this by providing a default devfile that enables you to create workspaces and to start developing quickly.\n\n## GitLab Workspaces\n\nGitLab Workspaces provide isolated development environments for making changes to your GitLab projects without the complexity of setting up local dependencies. Workspaces ensure reproducible development setups, allowing developers to share their environment configurations effortlessly.\n\nBy default, GitLab Workspaces are configured to use the GitLab VS Code fork and include the GitLab Workflow extension. To learn more, visit [the GitLab Workspaces documentation](https://docs.gitlab.com/ee/user/workspace/).\n\n## Understand devfiles\n\nA [**devfile**](https://devfile.io/docs/2.2.0/devfile-ecosystem) is a YAML-based declarative configuration file that defines a project's development environment. 
It specifies the necessary tools, languages, runtimes, and other components required for development.\n\nPreviously, [setting up a workspace](https://about.gitlab.com/blog/quick-start-guide-for-gitlab-workspaces/) required a custom devfile at the root of the repository. For example, a `.devfile.yaml` file. A typical devfile looked like this:\n\n![typical default devfile](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097868/Blog/Content%20Images/Blog/Content%20Images/Screenshot_2025-02-26_at_8.15.58_AM_aHR0cHM6_1750097868229.png)\n\n## GitLab default devfile\n\nStarting in GitLab 17.9, a GitLab default devfile is available for all projects when creating a workspace. This eliminates the need to manually create a devfile before starting a workspace.\nHere is the content of the default devfile:\n\n![GitLab default devfile content](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097868/Blog/Content%20Images/Blog/Content%20Images/Screenshot_2025-02-26_at_8.16.20_AM_aHR0cHM6_1750097868230.png)\n\nWhen creating a workspace with the GitLab UI, the option **Use GitLab default devfile** is always available – regardless of whether custom devfiles exist in the repository. Simply select this option to start exploring GitLab Workspaces with one less setup step.\n\n![Use GitLab default devfile screenshot](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097868/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097868232.png)\n\n## Create your own custom devfiles\nWhile the GitLab default devfile provides a quick way to start a workspace, you may want to customize your development environment to better fit your project's needs. 
By creating a custom devfile, you can tailor your development environment with the exact tools, dependencies, and configurations needed for your workflow.\n\nConsider creating a custom devfile if you need to:\n\n- Add project-specific dependencies beyond the base development image.\n- Adjust CPU and memory resource limits.\n- Configure multiple containers for additional services like databases.\n- Define custom, project-specific, environment variables.\n- Set up specific port mappings.\n- Integrate specialized development tools like debuggers or language servers.\n\nFor more details, see the [Workspaces devfile documentation](https://docs.gitlab.com/ee/user/workspace/#devfile).\n\n## Read more\n\n- [Build and run containers in Remote Development workspaces](https://about.gitlab.com/blog/build-and-run-containers-in-remote-development-workspaces/)\n- [Use GitLab AI features out-of-the-box in a GitLab Workspace](https://about.gitlab.com/blog/use-gitlab-ai-features-out-of-the-box-in-a-gitlab-workspace/)\n- [Quickstart guide for GitLab Remote Development workspaces](https://about.gitlab.com/blog/quick-start-guide-for-gitlab-workspaces/)\n- [Enable secure sudo access for GitLab Remote Development workspaces](https://about.gitlab.com/blog/enable-secure-sudo-access-for-gitlab-remote-development-workspaces/)\n",[1319,9,695,719,717],"collaboration",{"slug":1321,"featured":6,"template":698},"create-a-workspace-quickly-with-the-gitlab-default-devfile","content:en-us:blog:create-a-workspace-quickly-with-the-gitlab-default-devfile.yml","Create A Workspace Quickly With The Gitlab Default 
Devfile","en-us/blog/create-a-workspace-quickly-with-the-gitlab-default-devfile.yml","en-us/blog/create-a-workspace-quickly-with-the-gitlab-default-devfile",{"_path":1327,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1328,"content":1334,"config":1340,"_id":1342,"_type":14,"title":1343,"_source":16,"_file":1344,"_stem":1345,"_extension":19},"/en-us/blog/data-driven-devsecops-exploring-gitlab-insights-dashboards",{"title":1329,"description":1330,"ogTitle":1329,"ogDescription":1330,"noIndex":6,"ogImage":1331,"ogUrl":1332,"ogSiteName":685,"ogType":686,"canonicalUrls":1332,"schema":1333},"Data-driven DevSecOps: Exploring GitLab Insights Dashboards","Learn how to leverage GitLab Insights Dashboards to visualize key metrics, track project progress, and boost team productivity with customizable, data-driven views.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097210/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%2811%29_78Dav6FR9EGjhebHWuBVan_1750097210214.png","https://about.gitlab.com/blog/data-driven-devsecops-exploring-gitlab-insights-dashboards","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Data-driven DevSecOps: Exploring GitLab Insights Dashboards\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Ricardo Amarilla Villalba\"}],\n        \"datePublished\": \"2024-11-20\",\n      }",{"title":1329,"description":1330,"authors":1335,"heroImage":1331,"date":1337,"body":1338,"category":717,"tags":1339},[1336],"Ricardo Amarilla Villalba","2024-11-20","Metrics and analytics play a crucial role in driving productivity, quality,\nand success. GitLab, as a comprehensive DevSecOps platform, offers powerful\ntools for tracking and visualizing these vital metrics through its Insights\nDashboards. 
In this article, you'll learn how to use the Insights Dashboards\nin your environment.\n\n\n## Introduction to GitLab metrics and analytics \n\n\nGitLab provides an array of metrics and analytics tools that cover various\naspects of the DevSecOps lifecycle:\n\n\n1. [Productivity\nAnalytics](https://docs.gitlab.com/ee/user/analytics/productivity_analytics.html):\nTrack team velocity, cycle time, and lead time.  \n\n2. [Code Review\nAnalytics](https://docs.gitlab.com/ee/user/analytics/code_review_analytics.html):\nMeasure code quality, test coverage, and review efficiency.  \n\n3. [CI/CD\nAnalytics](https://docs.gitlab.com/ee/user/analytics/ci_cd_analytics.html):\nMonitor pipeline performance and deployment frequency.  \n\n4. [Value Stream\nAnalytics](https://docs.gitlab.com/ee/user/group/value_stream_analytics/):\nVisualize the flow of work from idea to production.  \n\n5. [Insights](https://docs.gitlab.com/ee/user/project/insights/): Explore\nand visualize data about your projects and groups.\n\n\nThese metrics offer invaluable insights into your development process,\nhelping teams identify bottlenecks, optimize workflows, and make data-driven\ndecisions.\n\n\n## Leveraging labels for specific metrics\n\n\nOne of GitLab's most powerful, yet understated features, is Labels, which\nallows you to filter and focus on specific metrics with pinpoint accuracy.\nBy strategically applying labels to issues, merge requests, and epics, you\ncan create custom views that provide targeted insights into your project's\nperformance and progress.\n\n\nLabels in GitLab act as versatile identifiers, allowing you to categorize\nand organize your work items with great flexibility. Whether you're tracking\nfeature development, bug fixes, or team-specific tasks, labels enable you to\nslice and dice your project data in ways that reveal meaningful patterns and\ntrends. 
This concept parallels the use of tags in cloud deployments, where\nresources are labeled for easier management, cost allocation, and\noperational insights.\n\n\nBy thoughtfully labeling your work items, you're essentially creating a\nsophisticated labeling system that can be leveraged to generate custom\ndashboards and reports. This approach empowers you to zoom in on the metrics\nthat matter most to your team or stakeholders, providing a clear and focused\nview of your project's health and momentum.\n\n\n## How to configure GitLab Insights\n\n\nGitLab Insights allow you to explore and visualize data about your projects\nand groups. They provide valuable analytics on various aspects such as\nissues created and closed during a specified period, average time for merge\nrequests to be merged, and triage hygiene. Insights can be configured for\nboth projects and groups.\n\n\nTo configure Insights:\n\n\n1. For project insights:  \n   * Create a file named `.gitlab/insights.yml` in the root directory of your project.  \n2. For group insights:  \n   * Create a `.gitlab/insights.yml` file in a project that belongs to your group.  \n   * Go to your group's **Settings > General**.  \n   * Expand the **Analytics section** and find the **Insights section**.  \n   * Select the project containing the configuration file and save changes.\n\nThe `.gitlab/insights.yml` file is a YAML file where you define the\nstructure and order of charts in a report, as well as the style of charts to\nbe displayed. 
Each chart definition includes parameters such as title,\ndescription, type, and query to specify the data source and filtering\nconditions.\n\n\nTo view insights, navigate to **Analyze > Insights** in your project or\ngroup.\n\n\n![View default Insights\nDashboard](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097218/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097217972.png)\n\n\n## Customize merge request insights\n\n\nWhile the default view provides valuable raw information, we can customize\nthe Insights Dashboard to uncover additional layers of information, such as\nwhich team was responsible for each merge request and what type of problem\neach one solved.\n\n\n## Merge request insights for each squad and requirement type\n\n\nMeasuring squad productivity in GitLab can be challenging, especially when\nthe GitLab group and subgroup structure doesn't align perfectly with your\nsquad organization. Here's how to overcome these challenges and effectively\ntrack squad productivity:\n\n\n### **Setting up squad-based metrics**\n\n\n1. **Label creation:** Create unique scope labels for each squad (e.g.,\n`squad::alpha`, `squad::beta`) and each requirement type (e.g., `type::bug`,\n`type::feature`, `type::maintenance`).\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/ZUOzORIUJeU?si=T8eHeGizS3blYFHB\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n2. **Label application:** Consistently apply these squad labels to all\nissues and merge requests handled by each squad, regardless of the project\nor group they're in.  
\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/fJ9entEBZG8?si=MlM6mKirEdkmwDDJ\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n**Hints:**  \n   * Use GitLab API to apply labels massively to existing open, merged, and closed MRs.  \n   * Add/remove/update labels as part of your GitLab CI pipeline.  \n   * Leverage the GitLab Triage Bot to automate the labeling process.  \n\n3. Dashboard setup: Create a `.gitlab/insights.yml` file in your project\nrepository with custom charts for team-specific and type-specific merge\nrequest insights.\n\n\n```\n\n\n## Default Merge Requests insights.yml \n\nmergeRequests:\n  title: Merge requests dashboard\n  charts:\n    - title: Merge requests merged per week \n      type: bar\n      query:\n        data_source: issuables\n        params:\n          issuable_type: merge_request\n          issuable_state: merged\n          group_by: week\n          period_limit: 12\n    - title: Merge requests merged per month\n      type: bar\n      query:\n        data_source: issuables\n        params:\n          issuable_type: merge_request\n          issuable_state: merged\n          group_by: month\n          period_limit: 3\n\n## Per-teams Merge Requests insights.yml\n\nmergeRequestsTeams:\n  title: Merge requests dashboard per teams\n  charts:\n    - title: Merge requests merged per week \n      type: stacked-bar\n      query:\n        data_source: issuables\n        params:\n          issuable_type: merge_request\n          issuable_state: merged\n          group_by: week\n          period_limit: 12\n          collection_labels:\n            - squad::alpha\n            - squad::beta\n    - title: Merge requests merged per month\n      type: stacked-bar\n      query:\n        data_source: issuables\n        params:\n          issuable_type: merge_request\n          issuable_state: merged\n          
group_by: month\n          period_limit: 3\n          collection_labels:\n            - squad::alpha\n            - squad::beta\n\n## Per-teams and Type Merge Requests insights.yml\n\nmergeRequestsTeamsAndType:\n  title: Per Teams and Type - Merge requests dashboard\n  charts:\n    - title: Merge requests merged per week - Squad Alpha\n      type: stacked-bar\n      query:\n        data_source: issuables\n        params:\n          issuable_type: merge_request\n          issuable_state: merged\n          filter_labels: squad::alpha\n          collection_labels:\n            - type::feature\n            - type::bug\n            - type::maintenance\n          group_by: week\n          period_limit: 12\n    - title: Merge requests merged per month - Squad Alpha\n      type: stacked-bar\n      query:\n        data_source: issuables\n        params:\n          issuable_type: merge_request\n          issuable_state: merged\n          filter_labels: squad::alpha\n          collection_labels:\n            - type::feature\n            - type::bug\n            - type::maintenance\n          group_by: month\n          period_limit: 3\n    - title: Merge requests merged per week - Squad Beta\n      type: stacked-bar\n      query:\n        data_source: issuables\n        params:\n          issuable_type: merge_request\n          issuable_state: merged\n          filter_labels: squad::beta\n          collection_labels:\n            - type::feature\n            - type::bug\n            - type::maintenance\n          group_by: week\n          period_limit: 12\n    - title: Merge requests merged per month - Squad Beta\n      type: stacked-bar\n      query:\n        data_source: issuables\n        params:\n          issuable_type: merge_request\n          issuable_state: merged\n          filter_labels: squad::beta\n          collection_labels:\n            - type::feature\n            - type::bug\n            - type::maintenance\n          group_by: month\n          period_limit: 
3\n\n```\n\n\nBy implementing these customizations, you can create insightful dashboards\nthat provide a clear view of merge request activity per team and requirement\ntype, allowing you to visualize trends over time, compare performance\nbetween squads, and analyze the distribution of different types of work for\neach squad. \n\n\n![dashboards with view of MR activity per team and requirement\ntype](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097218/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097217972.png)\n\n\n![dashboard comparing performance between\nsquads](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097218/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097217974.png)\n\n\n## Get started today\n\n\nGitLab Insights is just the tip of the iceberg when it comes to metrics and\nanalytics. To explore the full range of GitLab's powerful analytics\nfeatures, including Value Stream Analytics, CI/CD Analytics, and Code Review\nmetrics, check out our Value Stream Management product tour:\n\n\n[![Value Stream Management product\ntour](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097218/Blog/Content%20Images/Blog/Content%20Images/Screenshot_2024-11-20_at_12.28.08_PM_aHR0cHM6_1750097217976.png)](https://gitlab.navattic.com/vsm)\n\n\n> Ready to start your own metrics journey? 
Sign up for a [free trial\nof GitLab Ultimate\ntoday](https://gitlab.com/-/trials/new?glm_content=default-saas-trial&glm_source=about.gitlab.com%2F)\nand unlock the full potential of data-driven DevSecOps.\n\n\n## Read more\n\n- [Scheduled Reports Generation tool simplifies value stream\nmanagement](https://about.gitlab.com/blog/new-scheduled-reports-generation-tool-simplifies-value-stream-management/)\n\n- [Getting started with the new GitLab Value Streams\nDashboard](https://about.gitlab.com/blog/getting-started-with-value-streams-dashboard/)\n\n- [AI Impact analytics dashboard measures the ROI of\nAI](https://about.gitlab.com/blog/developing-gitlab-duo-ai-impact-analytics-dashboard-measures-the-roi-of-ai/)\n",[109,9,717,695,719,930],{"slug":1341,"featured":91,"template":698},"data-driven-devsecops-exploring-gitlab-insights-dashboards","content:en-us:blog:data-driven-devsecops-exploring-gitlab-insights-dashboards.yml","Data Driven Devsecops Exploring Gitlab Insights Dashboards","en-us/blog/data-driven-devsecops-exploring-gitlab-insights-dashboards.yml","en-us/blog/data-driven-devsecops-exploring-gitlab-insights-dashboards",{"_path":1347,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1348,"content":1354,"config":1361,"_id":1363,"_type":14,"title":1364,"_source":16,"_file":1365,"_stem":1366,"_extension":19},"/en-us/blog/deploy-a-server-using-go-with-gitlab-google-cloud",{"title":1349,"description":1350,"ogTitle":1349,"ogDescription":1350,"noIndex":6,"ogImage":1351,"ogUrl":1352,"ogSiteName":685,"ogType":686,"canonicalUrls":1352,"schema":1353},"Deploy a server using Go with GitLab + Google Cloud","This tutorial shows how to use GitLab’s Google Cloud integration to deploy a Golang server in less than 10 minutes, helping developers become more independent and 
efficient.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098028/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945_fJKX41PJHKCfSOWw4xQxm_1750098028126.png","https://about.gitlab.com/blog/deploy-a-server-using-go-with-gitlab-google-cloud","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Deploy a server using Go with GitLab + Google Cloud\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Claire Champernowne\"},{\"@type\":\"Person\",\"name\":\"Noah Ing\"}],\n        \"datePublished\": \"2025-01-28\",\n      }",{"title":1349,"description":1350,"authors":1355,"heroImage":1351,"date":1358,"body":1359,"category":717,"tags":1360},[1356,1357],"Claire Champernowne","Noah Ing","2025-01-28","Deploying an application to the cloud often requires assistance from production or DevOps engineers. GitLab's Google Cloud integration empowers developers to handle deployments independently. In this tutorial, you'll learn how to deploy a server to Google Cloud in less than 10 minutes using Go. Whether you’re a solo developer or part of a large team, this setup allows you to deploy applications efficiently.\n\n## You'll learn how to:\n\n1. Create a new project in GitLab\n2. Create a Go server utilizing `main.go`\n3. Use the Google Cloud integration to create a Service account\n4. Use the Google Cloud integration to create Cloud Run via a merge request\n5. Access your newly deployed Go server\n6. Clean up your environment\n\n## Prerequisites\n\n- Owner access on a Google Cloud Platform project\n- Working knowledge of Golang\n- Working knowledge of GitLab CI\n- 10 minutes\n\n## Step-by-step Golang server deployment to Google Cloud\n\n### 1. 
Create a new blank project in GitLab.\n\nWe decided to call our project `golang-cloud-run` for simplicity.\n\n![Create a new blank project in GitLab](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098035/Blog/Content%20Images/Blog/Content%20Images/image9_aHR0cHM6_1750098035249.png)\n\n### 2. Create a server utilizing this `main.go` demo.\n\nFind the `main.go` demo [here](https://gitlab.com/demos/applications/golang-cloud-run).\n\n```\n// Sample run-helloworld is a minimal Cloud Run service.\npackage main\n\nimport (\n\t\"fmt\"\n\t\"log\"\n\t\"net/http\"\n\t\"os\"\n)\n\nfunc main() {\n\tlog.Print(\"starting server...\")\n\thttp.HandleFunc(\"/\", handler)\n\n\t// Determine port for HTTP service.\n\tport := os.Getenv(\"PORT\")\n\tif port == \"\" {\n\t\tport = \"8080\"\n\t\tlog.Printf(\"defaulting to port %s\", port)\n\t}\n\n\t// Start HTTP server.\n\tlog.Printf(\"listening on port %s\", port)\n\tif err := http.ListenAndServe(\":\"+port, nil); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}\n\nfunc handler(w http.ResponseWriter, r *http.Request) {\n\tname := os.Getenv(\"NAME\")\n\tif name == \"\" {\n\t\tname = \"World\"\n\t}\n\tfmt.Fprintf(w, \"Hello %s!\\n\", name)\n}\n```\n\n### 3. Use the Google Cloud integration to create a Service account.\n\nNavigate to **Operate \\> Google Cloud \\> Create Service account**.\n\n![Golang tutorial - image 2](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098036/Blog/Content%20Images/Blog/Content%20Images/image11_aHR0cHM6_1750098035250.png)\n\n### 4. Configure the region you would like the Cloud Run instance deployed to.\n\n![Golang tutorial - image10](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098035/Blog/Content%20Images/Blog/Content%20Images/image10_aHR0cHM6_1750098035252.png)\n\n### 5. 
Use the Google Cloud integration to configure Cloud Run via Merge Request.\n\n![Golang tutorial - image4](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098035/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750098035254.png)\n\n### 6. This will open a merge request. Immediately merge the MR.\n\n![Golang tutorial - image6](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098036/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750098035257.png)\n\nThis merge request adds a CI/CD deployment job to your pipeline definition. In our case, this is also creating a pipeline definition, as we didn’t have one before.\n\n**Note:** The CI/CD variables `GCP_PROJECT_ID`, `GCP_REGION`, `GCP_SERVICE_ACCOUNT`, `GCP_SERVICE_ACCOUNT_KEY` will all be automatically populated from the previous steps. \n\n![Golang tutorial - image7](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098035/Blog/Content%20Images/Blog/Content%20Images/image7_aHR0cHM6_1750098035259.png)\n\n### 7. Voila! Check your pipeline and you will see you have successfully deployed to Google Cloud Run utilizing GitLab CI.\n\n![Golang tutorial - image2](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098035/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750098035261.png)\n\n\u003Cbr>\n\n![Golang tutorial - image3](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098035/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750098035262.png)\n\n## 8. 
Click the Service URL to view your newly deployed server.\n\nAlternatively, you can navigate to **Operate \\> Environments** to see a list of deployments for your environments.\n\n![Golang tutorial - image5](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098035/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750098035264.png)\n\nBy clicking on the environment called **main**, you’ll be able to view a complete list of deployments specific to that environment.\n\n![Golang tutorial - image8](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098035/Blog/Content%20Images/Blog/Content%20Images/image8_aHR0cHM6_1750098035265.png)\n\n## Next steps\n\nTo get started with developing your Go application, try adding another endpoint. For instance, in your `main.go` file, you can add a `/bye` endpoint as shown below (don’t forget to register the new handler function in main!):\n\n```\nfunc main() {\n\tlog.Print(\"starting server...\")\n\n\thttp.HandleFunc(\"/\", handler)\n\thttp.HandleFunc(\"/bye\", byeHandler)\n```\n\n```\nfunc byeHandler(w http.ResponseWriter, r *http.Request) {\n\tname := os.Getenv(\"NAME\")\n\tif name == \"\" {\n\t\tname = \"World\"\n\t}\n\tfmt.Fprintf(w, \"Bye %s!\\n\", name)\n}\n```\n\nYour `main.go` file should now look something like this:\n\n```\n// Sample run-helloworld is a minimal Cloud Run service.\npackage main\n\nimport (\n\t\"fmt\"\n\t\"log\"\n\t\"net/http\"\n\t\"os\"\n)\n\nfunc main() {\n\tlog.Print(\"starting server...\")\n\n\thttp.HandleFunc(\"/\", handler)\n\n\thttp.HandleFunc(\"/bye\", byeHandler)\n\n\t// Determine port for HTTP service.\n\tport := os.Getenv(\"PORT\")\n\tif port == \"\" {\n\t\tport = \"8080\"\n\t\tlog.Printf(\"defaulting to port %s\", port)\n\t}\n\n\t// Start HTTP server.\n\tlog.Printf(\"listening on port %s\", port)\n\tif err := http.ListenAndServe(\":\"+port, nil); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}\n\nfunc handler(w http.ResponseWriter, r *http.Request) {\n\tname := 
os.Getenv(\"NAME\")\n\tif name == \"\" {\n\t\tname = \"World\"\n\t}\n\tfmt.Fprintf(w, \"Hello %s!\\n\", name)\n}\n\nfunc byeHandler(w http.ResponseWriter, r *http.Request) {\n\tname := os.Getenv(\"NAME\")\n\tif name == \"\" {\n\t\tname = \"World\"\n\t}\n\tfmt.Fprintf(w, \"Bye %s!\\n\", name)\n}\n```\n\nPush the changes to the repo, and watch the `deploy-to-cloud-run job` deploy the updates. Once it’s complete, go back to the Service URL and navigate to the `/bye` endpoint to see the new functionality in action.\n\n## Clean up the environment\n\nTo prevent incurring charges on your Google Cloud account for the resources used in this tutorial, you can either delete the specific resources or delete the entire Google Cloud project. For detailed instructions, refer to the [cleanup guide](https://docs.gitlab.com/ee/tutorials/create_and_deploy_web_service_with_google_cloud_run_component/#clean-up).\n\n> Discover more tutorials like this in our [Solutions Architecture](https://about.gitlab.com/blog/tags/solutions-architecture/) area.\n",[951,9,719,930,717,695],{"slug":1362,"featured":6,"template":698},"deploy-a-server-using-go-with-gitlab-google-cloud","content:en-us:blog:deploy-a-server-using-go-with-gitlab-google-cloud.yml","Deploy A Server Using Go With Gitlab Google Cloud","en-us/blog/deploy-a-server-using-go-with-gitlab-google-cloud.yml","en-us/blog/deploy-a-server-using-go-with-gitlab-google-cloud",{"_path":1368,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1369,"content":1375,"config":1383,"_id":1385,"_type":14,"title":1386,"_source":16,"_file":1387,"_stem":1388,"_extension":19},"/en-us/blog/developing-gitlab-duo-blending-ai-and-root-cause-analysis-to-fix-ci-cd",{"title":1370,"description":1371,"ogTitle":1370,"ogDescription":1371,"noIndex":6,"ogImage":1372,"ogUrl":1373,"ogSiteName":685,"ogType":686,"canonicalUrls":1373,"schema":1374},"GitLab Duo: AI-powered CI/CD pipeline root cause analysis","Discover how we've infused Root Cause Analysis with AI to help 
remedy broken CI/CD pipelines, including example scenarios and take-away exercises.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097321/Blog/Hero%20Images/Blog/Hero%20Images/blog-hero-banner-1-0178-820x470-fy25_7JlF3WlEkswGQbcTe8DOTB_1750097321081.png","https://about.gitlab.com/blog/developing-gitlab-duo-blending-ai-and-root-cause-analysis-to-fix-ci-cd","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Developing GitLab Duo: Blending AI and Root Cause Analysis to fix CI/CD pipelines\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Rutvik Shah\"},{\"@type\":\"Person\",\"name\":\"Michael Friedrich\"}],\n        \"datePublished\": \"2024-06-06\",\n      }",{"title":1376,"description":1371,"authors":1377,"heroImage":1372,"date":1380,"body":1381,"category":801,"tags":1382},"Developing GitLab Duo: Blending AI and Root Cause Analysis to fix CI/CD pipelines",[1378,1379],"Rutvik Shah","Michael Friedrich","2024-06-06","___Generative AI marks a monumental shift in the software development industry, making it easier to develop, secure, and operate software. Our new blog series, written by our product and engineering teams, gives you an inside look at how we create, test, and deploy the AI features you need integrated throughout the enterprise. Get to know new capabilities within GitLab Duo and how they will help DevSecOps teams deliver better results for customers.___\n\nHave you ever encountered a broken [CI/CD](https://about.gitlab.com/topics/ci-cd/benefits-continuous-integration/) pipeline and had to halt your DevSecOps workflow, or even delay software deployment, as you try to figure out the root cause? Traditionally, when something goes wrong in the process of creating software, developers have to troubleshoot, dig through log files, and often do a lot of trial and error development. 
[GitLab Duo Root Cause Analysis](https://about.gitlab.com/gitlab-duo/), part of our suite of AI-powered features, removes the guesswork by determining the root cause for a failed CI/CD pipeline. In this article, you'll learn what Root Cause Analysis is and how to apply the AI-powered GitLab Duo feature to your DevSecOps workflow.\n\n> Discover the future of AI-driven software development with our GitLab 17 virtual launch event. [Watch today!](https://about.gitlab.com/seventeen/)\n\n### What is Root Cause Analysis?\n\nGitLab Duo Root Cause Analysis is an AI-powered feature that assists you in determining a root cause and suggesting a fix for a CI/CD job log failure by analyzing the logs.\n\nWhile Root Cause Analysis is often seen in product incident management, its workflows and debugging practices can be found in any DevSecOps workflow. Ops teams, administrators, and platform engineers are challenged by infrastructure-as-code (IaC) deployment errors, Kubernetes and GitOps problems, and long stack traces while investigating pipeline failures.\n\nGitLab Duo Root Cause Analysis keeps everyone in the same interface and uses AI-powered help to summarize, analyze, and propose fixes so that organizations can release secure software faster.\n\nA pipeline can encounter failures for a variety of reasons, including syntax errors in the code, missing dependencies that the pipeline relies on, test failures during the build process, Kubernetes and IaC deployment timeouts, and numerous other potential issues. When such failures occur, it becomes the responsibility of everyone to meticulously review the logs generated by the pipeline. This job log review process involves scrutinizing the detailed output to identify the specific errors and pinpoint the root cause of the pipeline failure. 
For example, the following pipeline has multiple job failures that need to be investigated and fixed.\n\n![Image depicting multiple job failures](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097332/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097332601.png)\n\nThe duration required to fix these failures can vary significantly and is largely influenced by several factors such as:\n- the developer's familiarity with the project\n- their level of experience in dealing with similar issues\n- their overall skill level in troubleshooting and problem-solving within the context of the pipeline.\n\nManual analysis can be exceedingly challenging and time-consuming, given that log data consists of application logs and system messages with a wide variety of potential sources of failures. A typical pipeline fix can consist of several iterations and context switching. The complexity and the unstructured nature of the logs is a perfect fit for speeding up the task using generative AI.  Using AI can reduce the time to identify and fix a pipeline error significantly and also lower the barrier of expertise that would be needed to fix a pipeline such as the above.\n\nWatch GitLab Duo Root Cause Analysis in action:\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n\n \u003Ciframe src=\"https://www.youtube.com/embed/sTpSLwX5DIs?si=J6-0Bf6PtYjrHX1K\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n### How does Root Cause Analysis work?\n\n[Root Cause Analysis](https://docs.gitlab.com/ee/user/ai_experiments.html#root-cause-analysis) works by forwarding a portion of the CI/CD job log to the [GitLab AI Gateway](https://docs.gitlab.com/ee/architecture/blueprints/ai_gateway/). GitLab ensures that the portion sent will fit inside the large language model (LLM) token limits alongside a prompt that has been pre-crafted to provide insights into why the job might have failed. 
The prompt also instructs the LLM to provide an example of how a user might fix a broken job.\n\nHere are two example scenarios where Root Cause Analysis can provide assistance.\n\n#### 1. Analyze a Python dependency error\n\nA Python application can import package modules with functionality that is not provided in the standard library. The project [Challenge - Root Cause Analysis - Python Config](https://gitlab.com/gitlab-da/use-cases/ai/ai-workflows/gitlab-duo-challenges/root-cause-analysis/challenge-root-cause-analysis-python-config) implements an application that parses configuration and initializes an SQLite database, which both work well without any dependencies. It uses best practices in CI/CD with a Python environment and caching. The latest feature implementation adds a Redis caching client, and now the CI/CD build is failing for some reason. \n\nBy using Root Cause Analysis, you can immediately learn that the `ModuleNotFoundError` text means that the module is actually not installed in the Python environment. GitLab Duo also suggests an example fix: Installing the Redis module through the PIP package manager. \n\n![Image depicting 'modulenotfounderror' and GL Duo suggested resolution](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097332/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097332602.png)\n\nThe failing pipeline can be viewed [here](https://gitlab.com/gitlab-da/use-cases/ai/ai-workflows/gitlab-duo-challenges/root-cause-analysis/challenge-root-cause-analysis-python-config/-/jobs/6992716398). \n\nThe Root Cause Analysis prompt provides a summary of the problem, which seems to be a problem with a missing `redis` module. Let's try to fix the problem by installing the `redis` module. You can either call `pip install redis` in the CI/CD job `script` section, or use a more sophisticated approach with the `requirements.txt` file. 
The latter is useful for a single source of truth for dependencies installed in the development environment and CI/CD pipelines.\n\n```yaml\ntest:\n  extends: [.python-req]\n  stage: test \n  before_script:\n    # [🦊] hint: Root cause analysis.\n    # Solution 1: Install redis using pip\n    - pip install redis\n    # Solution 2: Add redis to requirements.txt, use pip\n    - pip install -r requirements.txt \n\n  script:\n    - python src/main.py\n```\n\nAfter fixing the missing Python dependency, the CI/CD job fails again. Use Root Cause Analysis again to learn that no Redis service is running in the job. Switch to using GitLab Duo Chat and use the prompt `How to start a Redis service in CI/CD` to learn how to configure the `services` attribute in the CI/CD job.\n\n![Depicts the prompt for how to start a Redis service](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097333/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750097332602.png)\n\nModify the `.gitlab-ci.yml` with the `test` job, and specify the `redis` service.\n\n```yaml\ntest:\n  extends: [.python-req]\n  stage: test \n  before_script:\n    # [🦊] hint: Root cause analysis.\n    # Solution 1: Install redis using pip\n    - pip install redis\n    # Solution 2: Add redis to requirements.txt, use pip\n    - pip install -r requirements.txt \n\n  script:\n    - python src/main.py\n\n  # Solution 3 - Running Redis\n  services:\n    - redis\n```\n\nRunning the Redis server allows you to successfully execute the Python application, and print its output into the CI/CD job log.\n\n![output of Python application](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097332/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750097332603.png)\n\nThe solution is provided in the [solution/ 
directory](https://gitlab.com/gitlab-da/use-cases/ai/ai-workflows/gitlab-duo-challenges/root-cause-analysis/challenge-root-cause-analysis-python-config/-/tree/main/solution?ref_type=heads).\n\n**Tip:** You can also ask [GitLab Duo Chat](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html) to follow up on potential future problems:\n\n```markdown\nHow to lint Python code? Which tools are recommended for CI/CD.\nHow to pin a package version in Python requirements file?\t\nWhat are possible ways that this exception stacktrace is triggered in the future?\nAre there ways to prevent the application from failing?\n``` \n\nThe next example is more advanced and includes multiple failures. \n\n#### 2. Analyze missing Go runtime\n\nCI/CD jobs can be executed in containers, spawned from the contributed `image` attribute. If the container does not provide a programming language runtime, the executed `script` sections referencing the `go` binary fail. For example, the error message `/bin/sh: eval: line 149: go: not found` needs to be understood and fixed. \n\nIf the `go` command is not found in the container's runtime context, this can have multiple reasons:\n\n1. The job uses a minimal container image, for example `alpine`, and the Go language runtime was not installed.\n1. The job uses the wrong default container image, for example, specified on top of the CI/CD configuration, or using the `default` keyword.\n1. The job does not use a container image but the shell executor. The host operating system does not have the Go language runtime installed, or it is otherwise broken/not configured.\n\nThe project [Challenge - Root Cause Analysis - Go GitLab Release Fetcher](https://gitlab.com/gitlab-da/use-cases/ai/ai-workflows/gitlab-duo-challenges/root-cause-analysis/challenge-root-cause-analysis-go-gitlab-release-fetcher) provides an exercise challenge to analyze and fix CI/CD problems with a GitLab release fetcher application, written in Go. 
The `build` and `docker-build` CI/CD jobs are failing. Fixing the problem requires different scopes: Understanding why the Go runtime is not installed, and learning about the `Dockerfile` syntax. \n\n![Screenshot showing Change Docker Label job failed](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097332/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750097332603.png)\n\nThe [`solution/` directory](https://gitlab.com/gitlab-da/use-cases/ai/ai-workflows/gitlab-duo-challenges/root-cause-analysis/challenge-root-cause-analysis-go-gitlab-release-fetcher) provides two possible solutions after Root Cause Analysis. \n\n## Practice using Root Cause Analysis\n\nHere are some scenarios to use to practice using Root Cause Analysis.\n\n- When you are running into Kubernetes deployment errors or timeouts. \n\n- With OpenTofu or Terraform IaC pipelines failing to provision your cloud resources.\n\n- When the Ansible playbook fails with a cryptic permission error in CI/CD.\n\n- When the Java stack trace is 10 pages long.\n\n- With a shell script highlighting an execution error.\n\n- When a Perl script fails in a single line, which is the only line in the script.\n\n- When the CI/CD job times out and it is unclear which section would cause this.\n\n- When a network connection timeout is reached, and you think it cannot be DNS.\n\n### What is next for GitLab Duo Root Cause Analysis?\n\nWe want to help our users to get their pipelines back to passing in fewer iterations. The Root Cause Analysis will open and show the response in GitLab Duo Chat, our AI assistant. 
Users can build on the recommendation to generate a more precise fix by asking specific questions (e.g., programming language-specific fixes) or asking for alternative fixes based on the root cause.\n\nFor example, here is the Root Cause Analysis for a failing job:\n\n![Root Cause Analysis response](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097332/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097332603.png)\n\nUsers can ask follow-up questions that build upon the AI-generated response.\n\n- I do not want to create my own Docker image. Please explain different ways to fix the problem.\n\n- I don't have access to the Docker image creation. It seems that the Go binary is missing. Are there alternative images you can suggest?\n\nGitLab also will be running quality benchmarks for the generated responses and shipping usability improvements.\n\nPlease see our [Root Cause Analysis GA epic](https://gitlab.com/groups/gitlab-org/-/epics/13080) for more details. We would also love your feedback on the feature. Please leave a comment on our [Root Cause Analysis feedback issue](https://gitlab.com/groups/gitlab-org/-/epics/13872).\n\n## Get started with Root Cause Analysis\n\nPlease see our [documentation](https://docs.gitlab.com/ee/user/ai_experiments.html#root-cause-analysis) on how to enable the feature available to our GitLab Ultimate customers. Also, GitLab Duo Root Cause Analysis will soon be coming to GitLab self-managed and GitLab Dedicated.\n\nNot a GitLab Ultimate customer? Start [a free trial](https://gitlab.com/-/trial_registrations/new?glm_source=about.gitlab.com/blog&glm_content=default-saas-trial) today. 
\n\n## Read more of our \"Developing GitLab Duo\" series\n\n- [Developing GitLab Duo: How we validate and test AI models at scale](https://about.gitlab.com/blog/developing-gitlab-duo-how-we-validate-and-test-ai-models-at-scale/)\n\n- [Developing GitLab Duo: AI Impact analytics dashboard measures the ROI of AI](https://about.gitlab.com/blog/developing-gitlab-duo-ai-impact-analytics-dashboard-measures-the-roi-of-ai/)\n\n- [Developing GitLab Duo: How we are dogfooding our AI features](https://about.gitlab.com/blog/developing-gitlab-duo-how-we-are-dogfooding-our-ai-features/)\n\n- [Developing GitLab Duo: Secure and thoroughly test AI-generated code](https://about.gitlab.com/blog/how-gitlab-duo-helps-secure-and-thoroughly-test-ai-generated-code/)",[803,719,951,9,695],{"slug":1384,"featured":91,"template":698},"developing-gitlab-duo-blending-ai-and-root-cause-analysis-to-fix-ci-cd","content:en-us:blog:developing-gitlab-duo-blending-ai-and-root-cause-analysis-to-fix-ci-cd.yml","Developing Gitlab Duo Blending Ai And Root Cause Analysis To Fix Ci Cd","en-us/blog/developing-gitlab-duo-blending-ai-and-root-cause-analysis-to-fix-ci-cd.yml","en-us/blog/developing-gitlab-duo-blending-ai-and-root-cause-analysis-to-fix-ci-cd",{"_path":1390,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1391,"content":1397,"config":1403,"_id":1405,"_type":14,"title":1406,"_source":16,"_file":1407,"_stem":1408,"_extension":19},"/en-us/blog/developing-gitlab-duo-how-we-are-dogfooding-our-ai-features",{"title":1392,"description":1393,"ogTitle":1392,"ogDescription":1393,"noIndex":6,"ogImage":1394,"ogUrl":1395,"ogSiteName":685,"ogType":686,"canonicalUrls":1395,"schema":1396},"Developing GitLab Duo: How we are dogfooding our AI features","As part of our blog series, we share real-world examples of how we integrate AI throughout our software development lifecycle and how we use metrics to gauge their 
success.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098360/Blog/Hero%20Images/Blog/Hero%20Images/blog-hero-banner-1-0178-820x470-fy25_7JlF3WlEkswGQbcTe8DOTB_1750098360821.png","https://about.gitlab.com/blog/developing-gitlab-duo-how-we-are-dogfooding-our-ai-features","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Developing GitLab Duo: How we are dogfooding our AI features\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"David O'Regan\"}],\n        \"datePublished\": \"2024-05-20\",\n      }",{"title":1392,"description":1393,"authors":1398,"heroImage":1394,"date":1400,"body":1401,"category":801,"tags":1402},[1399],"David O'Regan","2024-05-20","***Generative AI marks a monumental shift in the software development industry, making it easier to develop, secure, and operate software. Our new blog series, written by our product and engineering teams, gives you an inside look at how we create, test, and deploy the AI features you need integrated throughout the enterprise. Get to know new capabilities within GitLab Duo and how they will help DevSecOps teams deliver better results for customers.***\n\n[GitLab Duo](https://about.gitlab.com/gitlab-duo/), our suite of AI-powered features, has transformed our internal engineering workflows, driving efficiency gains across our development process. As strong proponents of dogfooding and transparency, we wanted to showcase how our teams leverage AI, including standouts like GitLab Duo Code Suggestions and GitLab Duo Chat, daily to streamline development processes, reduce manual effort, and enhance productivity. You'll learn about the benefits we've experienced for highly technical teams like engineering to less technical teams such as technical writing and product management.\n\n> Discover the future of AI-driven software development with our GitLab 17 virtual launch event. 
[Watch today!](https://about.gitlab.com/seventeen/)\n\n## Real-world use cases\n\nOur teams have integrated [GitLab Duo's many features](https://about.gitlab.com/gitlab-duo/#features) into their daily routines. Here are some examples of how GitLab Duo is helping them carry out everyday activities.\n\n### Summarization and documentation\n- **Streamline the code review process:** Staff Backend Developer [Gosia Ksionek](https://about.gitlab.com/company/team/#mksionek) showcases the practical benefits of AI in her workflow by using GitLab Duo to streamline the code review process. She effectively utilizes GitLab Duo to [summarize merge requests](https://youtu.be/3SIhe8dgFEc), making it easier and faster to review code changes. In addition to summarizing merge requests, Gosia also leverages GitLab Duo to [answer coding questions](https://www.youtube.com/watch?v=6n0I53XsjTc) and [explain complex code snippets](https://www.youtube.com/watch?v=3m2YRxa1SCY). This enhances her productivity and helps her better understand and manage intricate codebases. 
Through these demonstrations, Gosia highlights how GitLab Duo can significantly improve efficiency and clarity in the development process, making it an invaluable tool for developers.\n\n\u003Ccenter>\n\nWatch Gosia use GitLab Duo Merge Request Summary:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/3SIhe8dgFEc?si=Q8JG3Ix3K_THhbpv\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\nWatch Gosia use GitLab Duo to answer coding questions: \n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/6n0I53XsjTc?si=LA9VBHrgXpfJImSL\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\nWatch Gosia use GitLab Duo to explain complex code snippets:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/3m2YRxa1SCY?si=oms3szKwZoz-4yeq\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n\u003C/center>\n\n- **Condense comment threads:** [Bartek Marnane](https://about.gitlab.com/company/team/#bmarnane), Vice President of Expansion Software Development, uses GitLab Duo to condense lengthy comment threads into concise summaries, ensuring all relevant details are captured when updating issue descriptions.\n\n- **Create new documentation:** [Taylor McCaslin](https://about.gitlab.com/company/team/#tmccaslin), Group Manager, Product - Data Science Section, leveraged GitLab Duo to [create new documentation for GitLab Duo itself](https://docs.gitlab.com/ee/user/ai_features.html), exemplifying a meta use case that enhances clarity and consistency and greatly reduces the time to document new features.\n\n- **Craft release notes:** [Amanda Rueda](https://about.gitlab.com/company/team/#amandarueda), Senior Product Manager for Product 
Planning, uses GitLab Duo to [craft brief, impactful summaries for release notes](https://gitlab.com/groups/gitlab-org/-/epics/10267), highlighting changes and their value to users. By using well-crafted prompts like below, Amanda supercharges her workflow and ensures that each release note is clear, concise, and user-focused, enhancing the overall communication and user experience:\u003Cbr>\u003Cbr>\n*“Please create a two sentence summary of this change, which can be used for our release notes. The tone should be conversational and should be in second person. The summary should include a description of the problem or change and be tied to the value we are creating for you, the user.”*\n\u003Cbr>\u003Cbr>\n    - Here are some examples of release notes co-created with GitLab Duo:\n      - [Expanded options for sorting your Roadmap](https://gitlab.com/gitlab-org/gitlab/-/issues/460492)\n      - [Issue Board Clarity now with Milestone & Iteration](https://gitlab.com/gitlab-org/gitlab/-/issues/25758)\n      - [Design Management Features Extended to Product Teams](https://gitlab.com/gitlab-org/gitlab/-/issues/438829)\n\n- **Optimize docs site navigation:** [Suzanne Selhorn](https://about.gitlab.com/company/team/#sselhorn), Staff Technical Writer, tapped GitLab Duo to [optimize the left navigation of documentation](https://docs.gitlab.com/ee/user/get_started/get_started_projects.html) by providing a workflow-based order of pages. Suzanne provided a list of features to GitLab Duo, which generated the optimal order, updating the left navigation to match. 
GitLab Duo also drafted the [Getting Started](https://docs.gitlab.com/ee/user/get_started/get_started_planning_work.html) documentation much faster than were she to use traditional, manual approaches.\n\n### Goal setting and team alignment\n- **Draft and refine OKRs:** [François Rosé](https://about.gitlab.com/company/team/#francoisrose), Engineering manager, Create:Code Review Backend, finds [GitLab Duo Chat](https://about.gitlab.com/blog/gitlab-duo-chat-now-generally-available/) invaluable for drafting and refining OKRs. By articulating objectives more clearly and effectively, François enhances goal setting and team alignment. Using Chat, François ensures that each OKR is precise, actionable, and aligned with the team's goals, thereby improving overall team performance and cohesion. Here is an example prompt he uses:\u003Cbr>\u003Cbr>\n\n    *\"Here is an OKR I am thinking of creating:*\n\n    *Objective: Retrospect on retrospectives, to foster a thriving team*\n\n    *KR: Measure retrospective satisfaction from 100% of team members*\n\n    *KR: Identify 3 improvements to the async retrospectives*\n\n    *KR: Implement 1 improvement*\n\n    *Please provide direct feedback on how to improve the formulation of this objective and these key results.\"*\n\u003Cbr>\u003Cbr>\n\n- **Streamlined hiring and recruitment processes:** Chat helped [Denys Mishunov](https://about.gitlab.com/company/team/#dmishunov), Staff Frontend Engineer, formulate a clear and concise text for updating the email template for technical interview candidates. The team collaborated on refining the communication to ensure candidates receive all necessary information using a merge request. 
This example showcased the practical application of AI tools in enhancing communication processes within the hiring workflow.\n\n### Incident response and configuration\n- **Summarize production incidents:** [Steve Xuereb](https://about.gitlab.com/company/team/#sxuereb), Staff Site Reliability Engineer, employs GitLab Duo to summarize production incidents and create detailed incident reviews, streamlining the documentation process.\n\n- **Create boilerplate `.gitlab-ci.yml` files:**  Steve also uses Chat to create boilerplate `.gitlab-ci.yml` files, which significantly sped up his workflow. [Chat](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html) serves as a valuable partner for suggesting ideas. Additionally, [Code Explanation](https://docs.gitlab.com/ee/user/ai_features.html#code-explanation) provides detailed answers that are helpful during incidents, enhancing his productivity and understanding of the codebase.\n\n### Code generation and testing\n- **Full-stack development:** [Peter Hegman](https://about.gitlab.com/company/team/#peterhegman), Senior Frontend Engineer, has been using [Code Suggestions for his JavaScript and Ruby development](https://gitlab.com/gitlab-org/gitlab/-/issues/435783#note_1731321963). This highlights that Code Suggestions has become a powerful tool for developers moving across a full technical stack. \n\n- **Generate Python scripts:** Denys conducted [an experiment using GitLab Duo for a non-GitLab task](https://gitlab.com/gitlab-org/ai-powered/ai-framework/ai-experimentation). 
This example highlights the flexibility and utility of our AI tools beyond typical software development tasks.\n\n\u003Ccenter>\nWatch how Denys uses GitLab Duo to generate Python scripts to fetch content data and store it locally:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/30ZTtk4K5yU?si=p5ZcFLg6dTZL5gFE\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n\u003C/center>\n\n### Research and support\n- **Generate test source code:**  [Michael Friedrich](https://about.gitlab.com/company/team/#dnsmichi), Senior Developer Advocate, uses GitLab Duo to generate test source code for CI/CD components. This approach has been shared in various talks and presentations, such as the recent Open Source @ Siemens event ([public slides](https://go.gitlab.com/duA2Fc)). Using GitLab Duo in this manner helps ensure that the code is consistent, well-documented, and aligned with our best practices. Check out his [Rust example](https://gitlab.com/components/rust#contributing).\n\n![Rust example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098367/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750098367547.png)\n\n- **Streamline research tasks:** Our team members consistently turn to Chat when they have questions about GitLab features, streamlining their research and support tasks. Michael shared, \"When I have a question about GitLab features, I default to using Chat instead of opening 100 browser tabs. This workflow helps me assist users on our community forum efficiently. 
For instance, I recently [helped a user with SSH deployment](https://forum.gitlab.com/t/how-to-make-ssh-deployment-more-clear-in-gitlab/102051/4?u=dnsmichi) using this method.\" Using Chat not only saves time but also provides quick, accurate information, enhancing the support we offer to our community.\n\n### Feature testing\n- **Test new features:** Our engineers use GitLab Duo to test new features like [Markdown support in Code Suggestions](https://gitlab.com/gitlab-org/gitlab/-/issues/443365). One of our team members noted, \"I need to test Markdown support in Code Suggestions for writing blog posts and GitLab docs in VS Code. I saw it was merged for 17.0.\" By testing these features internally, we ensure they meet our quality standards before release.\n\n### Understanding external codebases\n- **Explain external projects:** GitLab Duo's `/explain` feature is particularly useful for understanding external projects imported into GitLab. This capability was highlighted in a recent livestream he did with open source expert Eddie Jaoude. Michael let us know, \"I use `/explain` on external projects to understand the source code. I pitched this idea for learning about open source projects, dependencies, etc. 
during the livestream.\" This feature is invaluable for developers who need to quickly grasp the functionality and dependencies of unfamiliar codebases, significantly improving their efficiency and understanding.\n\n\u003Ccenter>\nWatch Michael demo `/explain` during a livestream with Eddie Jaoude:\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/L2Mx8hOhkEE?si=R7W3v4EDqeJCaPOw\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n\u003C/center>\n\n## GitLab Duo's benefits\n\nThe integration of GitLab Duo has brought about numerous positive impacts, significantly enhancing our engineering and product development workflows:\n\n- Many tasks that previously required manual intervention are now automated, freeing up valuable time for our engineers. For example, summarizing long threads and creating boilerplate code are now more efficient, allowing our team to focus on more complex issues.\n- The time taken to document and summarize issues has decreased, allowing for quicker information dissemination and decision-making.\n- With AI-assisted code suggestions and explanations, our teams produce higher quality code with fewer errors and faster debugging processes. The integration of GitLab Duo into incident reviews and coding assistance has led to more efficient and effective code reviews.\n- Administrative tasks, such as drafting OKRs and creating release notes, have been streamlined. \n\nGitLab Duo has helped to not only improve our efficiency but also to enhance the quality and speed of our development processes, illustrating the transformative power of AI in software development.\n\n## What's next?\n\nWe are committed to further integrating AI into our workflows and continuously improving GitLab Duo features based on internal feedback and evolving needs. 
The ongoing collection of use cases and metrics with the [AI Impact analytics dashboard](https://about.gitlab.com/blog/developing-gitlab-duo-ai-impact-analytics-dashboard-measures-the-roi-of-ai/) will guide enhancements and ensure that GitLab Duo remains at the forefront of AI-driven development tools.\n\n![Dogfooding Duo - AI analytics dashboard](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098367/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098367547.png)\n\n> [Get started using GitLab Duo today with our free trial.](https://about.gitlab.com/gitlab-duo/#free-trial)\n\n## Read more \"Developing GitLab Duo\"\n\n- [Developing GitLab Duo: AI Impact analytics dashboard measures the ROI of AI](https://about.gitlab.com/blog/developing-gitlab-duo-ai-impact-analytics-dashboard-measures-the-roi-of-ai/)\n- [Developing GitLab Duo: How we validate and test AI models at scale](https://about.gitlab.com/blog/developing-gitlab-duo-how-we-validate-and-test-ai-models-at-scale/)\n- [Developing GitLab Duo: Secure and thoroughly test AI-generated code](https://about.gitlab.com/blog/how-gitlab-duo-helps-secure-and-thoroughly-test-ai-generated-code/)\n- [Developing GitLab Duo: Blending AI and Root Cause Analysis to fix CI/CD pipelines](https://about.gitlab.com/blog/developing-gitlab-duo-blending-ai-and-root-cause-analysis-to-fix-ci-cd/)",[803,804,695,9,888],{"slug":1404,"featured":91,"template":698},"developing-gitlab-duo-how-we-are-dogfooding-our-ai-features","content:en-us:blog:developing-gitlab-duo-how-we-are-dogfooding-our-ai-features.yml","Developing Gitlab Duo How We Are Dogfooding Our Ai 
Features","en-us/blog/developing-gitlab-duo-how-we-are-dogfooding-our-ai-features.yml","en-us/blog/developing-gitlab-duo-how-we-are-dogfooding-our-ai-features",{"_path":1410,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1411,"content":1417,"config":1424,"_id":1426,"_type":14,"title":1427,"_source":16,"_file":1428,"_stem":1429,"_extension":19},"/en-us/blog/developing-gitlab-duo-how-we-validate-and-test-ai-models-at-scale",{"title":1412,"description":1413,"ogTitle":1412,"ogDescription":1413,"noIndex":6,"ogImage":1414,"ogUrl":1415,"ogSiteName":685,"ogType":686,"canonicalUrls":1415,"schema":1416},"Developing GitLab Duo: How we validate and test AI models at scale","Our blog series debuts with a behind-the-scenes look at how we evaluate LLMs, match them to use cases, and fine-tune them to produce better responses for users.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659856/Blog/Hero%20Images/blog-hero-banner-1-0178-820x470-fy25.png","https://about.gitlab.com/blog/developing-gitlab-duo-how-we-validate-and-test-ai-models-at-scale","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Developing GitLab Duo: How we validate and test AI models at scale\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Susie Bitters\"}],\n        \"datePublished\": \"2024-05-09\",\n      }",{"title":1412,"description":1413,"authors":1418,"heroImage":1414,"date":1420,"body":1421,"category":801,"tags":1422},[1419],"Susie Bitters","2024-05-09","**_Generative AI marks a monumental shift in the software development industry, making it easier to develop, secure, and operate software. Our new blog series, written by our product and engineering teams, gives you an inside look at how we create, test, and deploy the AI features you need integrated throughout the enterprise. 
Get to know new capabilities within GitLab Duo and how they will help DevSecOps teams deliver better results for customers._**\n\nGitLab values the trust our customers place in us. Part of maintaining that trust is transparency in how we build, evaluate, and ensure the high-quality functionality of our [GitLab Duo](https://about.gitlab.com/gitlab-duo/) AI features. GitLab Duo features are powered by a diverse set of models, which allows us to support a broad set of use cases and gives our customers flexibility. GitLab is not tied to a single model provider by design. We currently use foundation models from [Google](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/-/blob/main/ai_gateway/models/vertex_text.py?ref_type=heads#L86) and [Anthropic](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/-/blob/main/ai_gateway/models/anthropic.py?ref_type=heads#L62). However, we continuously assess what models are the right matches for GitLab Duo’s use cases. In this article, we give you an inside look at our AI model validation process.\n\n> Discover the future of AI-driven software development with our GitLab 17 virtual launch event. [Watch today!](https://about.gitlab.com/seventeen/)\n\n## Understanding LLMs\n\nLarge language models (LLMs) are generative AI models that power many AI features across the platform. Trained on vast datasets, LLMs predict the next word in a sequence based on preceding context. Given an input prompt, they generate human-like text by sampling from the probability distribution of words conditioned on the prompt.\n\nLLMs enable intelligent code suggestions, conversational chatbots, code explanations, vulnerability analysis, and more. Their ability to produce diverse outputs for a given prompt makes standardized quality evaluation challenging. 
LLMs can be optimized for different characteristics, which is why there are so many AI models actively being developed.\n\n## Testing at scale\n\nUnlike traditional software systems where inputs and outputs can be more easily defined and tested, LLMs produce outputs that are often nuanced, diverse, and context-dependent. Testing these models requires comprehensive strategies that account for subjective and variable interpretations of quality, as well as the stochastic nature of their outputs. We, therefore, cannot judge the quality of an LLM’s output in an individual or anecdotal fashion; instead, we need to be able to examine the overall pattern of an LLM's behavior. To get a sense of those patterns, we need to test at scale. Testing at scale refers to the process of evaluating the performance, reliability, and robustness of a system or application across a large and diverse array of datasets and use cases. Our [Centralized Evaluation Framework (CEF)](https://about.gitlab.com/direction/ai-powered/ai_framework/ai_evaluation/) utilizes thousands of prompts tied to dozens of use cases to allow us to identify significant patterns and assess the overall behavior of our foundational LLMs and the GitLab Duo features in which they are integrated.\n\nTesting at scale helps us:\n\n- **Ensure quality:** Testing at scale enables us to assess the quality and reliability of these models across a wide range of scenarios and inputs. By validating the outputs of these models at scale, we can start to identify patterns and mitigate potential issues such as systematic biases, anomalies, and inaccuracies. \n- **Optimize performance:** Scaling up testing efforts allows GitLab to evaluate the performance and efficiency of LLMs under real-world conditions. 
This includes assessing factors such as output quality, latency, and cost to optimize the deployment and operation of these models in GitLab Duo features.\n- **Mitigate risk:** Testing LLMs at scale helps mitigate the risks associated with deploying LLMs in critical applications. By conducting thorough testing across diverse datasets and use cases, we can identify and address potential failure modes, security vulnerabilities, and ethical concerns before they impact our customers.\n\nTesting LLMs at scale is imperative for ensuring their reliability and robustness for deployment within the GitLab platform. By investing in comprehensive testing strategies that encompass diverse datasets, use cases, and scenarios, GitLab is working to unlock the full potential of AI-powered workflows while mitigating potential risks.\n\n### How we test at scale\n\nThese are the steps we take to test LLMs at scale.\n\n#### Step 1: Create a prompt library as a proxy for production\nWhile other companies view and use customer data to train their AI features, GitLab currently does not.  As a result, we needed to develop a comprehensive prompt library that is a proxy for both the scale and activity of production.\n\nThis prompt library is composed of questions and answers. The questions represent the kinds of queries or inputs that we would expect to see in production, while the answers represent a ground truth of what our ideal answer would be. This ground truth answer could also be mentally framed as a target answer. Both the question and the answer may be human generated, but are not necessarily so. These question/answer pairs give us a basis for comparison and a reference frame that allow us to tease out differences between models and features. 
When multiple models are asked the same question and generate different responses, we can use our ground truth answer to determine which model has provided an answer that is most closely aligned to our target and score them accordingly.\n\nAgain, a key element of a comprehensive prompt library is ensuring that it is representative of the inputs that we expect to see in production. We want to know how well foundational models fit to our specific use case, and how well our features are performing. There are numerous benchmark prompt datasets, but those datasets may not be reflective of the use cases that we see for features at GitLab. Our prompt library is designed to be specific to GitLab features and use cases.\n\n#### Step 2: Baseline model performance\n\nOnce we have crafted a prompt library that accurately reflects production activity, we feed those questions into [various models](https://about.gitlab.com/direction/ai-powered/ai_framework/ai_evaluation/foundation_models/) to test how well they serve our customer’s needs. We compare each response to our ground truth and provide it a ranking based on a series of metrics including: [Cosine Similarity Score](https://about.gitlab.com/direction/ai-powered/ai_framework/ai_evaluation/metrics/#similarity-scores), [Cross Similarity Score](https://about.gitlab.com/direction/ai-powered/ai_framework/ai_evaluation/metrics/#cross-similarity-score),  [LLM Judge](https://about.gitlab.com/direction/ai-powered/ai_framework/ai_evaluation/metrics/#llm-judge), and [Consensus Filtering with an LLM Judge](https://about.gitlab.com/direction/ai-powered/ai_framework/ai_evaluation/metrics/#consensus-filtering-with-llm-judge). This first iteration provides us a baseline for how well each model is performing, and guides our selection of a foundational model for our features. 
For brevity, we won’t go into the details here, but we encourage you to [learn more about the metrics here](https://about.gitlab.com/direction/ai-powered/ai_framework/ai_evaluation/metrics/). It is important to note this isn’t a solved problem; the wider AI industry is actively researching and developing new techniques. GitLab’s model validation team keeps a pulse on the industry and is continuously iterating on how we measure and score the LLMs GitLab Duo uses.  \n\n#### Step 3: Feature development\n\nNow that we have a baseline for our selected model's performance, we can start developing our features with confidence. While prompt engineering gets a lot of buzz, focusing entirely on changing the behavior of a model via prompting (or any other technique) without validation means that you are operating in the dark and very possibly overfitting your prompting. You may solve one problem, but be causing a dozen more. You would never know. Creating a baseline for a model's performance allows us to track how we are changing behavior over time for all our necessary use cases. At GitLab, we re-validate the performance of our features on a daily basis during active development to help ensure that all changes improve the overall functionality.\n\n#### Step 4: Iterate, iterate, iterate\n\nHere is how our experimental iterations work. Each cycle, we examine the scores from our tests at scale to identify patterns:\n\n- What are the commonalities across our weakest areas?\n- Is our feature performing poorly based on a specific metric or on a certain use case?\n- Do we see consistent errors popping up in response to a certain kind of question?\n\nOnly when we test at scale do these kinds of patterns begin to emerge and allow us to focus our experiments. Based on these patterns, we propose a variety of experiments or approaches to try to improve performance in a specific area and on a specific metric.\n\nHowever, testing at scale is both expensive and time-consuming. 
To enable faster and less expensive iteration, we craft a smaller scale dataset to act as a mini-proxy. The focused subset will be weighted to include question/answer pairs that we know we want to improve upon, and the broader subset will also include sampling of all the other use cases and scores to ensure that our changes aren't adversely affecting the feature broadly. Make your change and run it against the focused subset of data. How does the new response compare to the baseline? How does it compare to the ground truth?\n\nOnce we have found a prompt that addresses the specific use case we are working on with the focused subset, we validate that prompt against a broader subset of data to help ensure that it won’t adversely affect other areas of the feature. Only when we believe that the new prompt improves our performance in our target area through validation metrics AND doesn’t degrade performance elsewhere, do we push that change to production.\n\nThe entire Centralized Evaluation Framework is then run against the new prompt and we validate that it has increased the performance of the entire feature against the baseline from the day before. In this way, GitLab is constantly iterating to help ensure that you are getting the latest and greatest performance of AI-powered features across the GitLab ecosystem. This allows us to ensure that we keep working faster, together.\n\n### Making GitLab Duo even better\n\nHopefully this gives you insight into how we’re responsibly developing GitLab Duo features. This process has been developed as we’ve brought [GitLab Duo Code Suggestions](https://docs.gitlab.com/ee/user/project/repository/code_suggestions/) and [GitLab Duo Chat](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html) to general availability. We’ve also integrated this validation process into our development process as we iterate on GitLab Duo features. It’s a lot of trial and error, and many times fixing one thing breaks three others. 
But we have data-driven insights into those impacts, which helps us ensure that GitLab Duo is always getting better.\n\n> Start a [free trial of GitLab Duo](https://about.gitlab.com/gitlab-duo/#free-trial) today!\n\n ## Resources\n - [GitLab AI Transparency Center](https://about.gitlab.com/ai-transparency-center/)\n - [GitLab's AI Ethics Principles for Product Development](https://handbook.gitlab.com/handbook/legal/ethics-compliance-program/ai-ethics-principles/)\n - [GitLab AI-powered Direction page](https://about.gitlab.com/direction/ai-powered/)\n\n\u003Cfigure class=video_container>\n\u003Ciframe width=560 height=315 src=\"https://www.youtube-nocookie.com/embed/LifJdU3Qagw?si=A4kl6d32wPYC4168\" title=\"YouTube video player\" frameborder=0 allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share\" allowfullscreen=\"\">\u003C/iframe>\n\u003C/figure>\n\n## Read more of the \"Developing GitLab Duo\" series\n\n- [Developing GitLab Duo: AI Impact analytics dashboard measures the ROI of AI](https://about.gitlab.com/blog/developing-gitlab-duo-ai-impact-analytics-dashboard-measures-the-roi-of-ai/)\n- [Developing GitLab Duo: How we are dogfooding our AI features](https://about.gitlab.com/blog/developing-gitlab-duo-how-we-are-dogfooding-our-ai-features/) \n- [Developing GitLab Duo: Secure and thoroughly test AI-generated code](https://about.gitlab.com/blog/how-gitlab-duo-helps-secure-and-thoroughly-test-ai-generated-code/)\n- [Developing GitLab Duo: Blending AI and Root Cause Analysis to fix CI/CD pipelines](https://about.gitlab.com/blog/developing-gitlab-duo-blending-ai-and-root-cause-analysis-to-fix-ci-cd/)",[803,951,9,695,1423],"inside GitLab",{"slug":1425,"featured":91,"template":698},"developing-gitlab-duo-how-we-validate-and-test-ai-models-at-scale","content:en-us:blog:developing-gitlab-duo-how-we-validate-and-test-ai-models-at-scale.yml","Developing Gitlab Duo How We Validate And Test Ai Models At 
Scale","en-us/blog/developing-gitlab-duo-how-we-validate-and-test-ai-models-at-scale.yml","en-us/blog/developing-gitlab-duo-how-we-validate-and-test-ai-models-at-scale",{"_path":1431,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1432,"content":1437,"config":1443,"_id":1445,"_type":14,"title":1446,"_source":16,"_file":1447,"_stem":1448,"_extension":19},"/en-us/blog/developing-gitlab-duo-series",{"title":1433,"description":1434,"ogTitle":1433,"ogDescription":1434,"noIndex":6,"ogImage":1414,"ogUrl":1435,"ogSiteName":685,"ogType":686,"canonicalUrls":1435,"schema":1436},"Developing GitLab Duo series","Our unique blog series, written by our Product and Engineering teams, takes you behind the scenes of our AI innovation and guides you through our newest AI features powering your DevSecOps workflow.","https://about.gitlab.com/blog/developing-gitlab-duo-series","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Developing GitLab Duo series\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"GitLab Team\"}],\n        \"datePublished\": \"2024-06-03\"\n      }",{"title":1433,"description":1434,"authors":1438,"heroImage":1414,"date":1440,"body":1441,"category":801,"tags":1442},[1439],"GitLab Team","2024-06-03","Generative AI marks a monumental shift in the software development industry, making it easier to develop, secure, and operate software. Our blog series, written by our product and engineering teams, gives you an inside look at how we create, test, and deploy the AI features you need integrated throughout the enterprise. Get to know new capabilities within GitLab Duo and how they will help DevSecOps teams deliver better results for customers.\n\n> Live demo! Discover the future of AI-driven software development with our GitLab 17 virtual launch event. [Register today](https://about.gitlab.com/seventeen/)!\n\n## 1. 
[How we validate and test AI models at scale](https://about.gitlab.com/blog/developing-gitlab-duo-how-we-validate-and-test-ai-models-at-scale/)\n\n- Our blog series debuts with a behind-the-scenes look at how we evaluate LLMs, match them to use cases, and fine-tune them to produce better responses for users.\n\n## 2. [AI Impact analytics dashboard measures the ROI of AI](https://about.gitlab.com/blog/developing-gitlab-duo-ai-impact-analytics-dashboard-measures-the-roi-of-ai/)\n\n- We spotlight a new feature that provides detailed metrics, such as the Code Suggestions Usage Rate, to help understand the effectiveness of AI investments.\n\n## 3. [How we are dogfooding our AI features](https://about.gitlab.com/blog/developing-gitlab-duo-how-we-are-dogfooding-our-ai-features/)\n\n- We share real-world examples of how we integrate AI throughout our software development lifecycle and how we use metrics to gauge their success.\n\n## 4. [Secure and thoroughly test AI-generated code](https://about.gitlab.com/blog/how-gitlab-duo-helps-secure-and-thoroughly-test-ai-generated-code/)\n\n- Learn step-by-step how to enhance AI-generated code reliability and security using GitLab Duo and GitLab Pages (includes code samples and prompts).\n\n## 5. [Blending AI and Root Cause Analysis to fix CI/CD pipelines](https://about.gitlab.com/blog/developing-gitlab-duo-blending-ai-and-root-cause-analysis-to-fix-ci-cd/)\n\n- Discover how we've infused Root Cause Analysis with AI to help remedy broken CI/CD pipelines, including example scenarios and take-away exercises.\n\n## 6. [Developing GitLab Duo: A roundup of recent Chat enhancements](https://about.gitlab.com/blog/developing-gitlab-duo-a-roundup-of-recent-chat-enhancements)\n- Discover the latest improvements to GitLab Duo Chat, including prompt cancellation and architectural upgrades. 
Learn how these updates streamline workflows and boost productivity.\n\n> Learn more about [GitLab Duo](https://about.gitlab.com/gitlab-duo/), our AI-powered suite of features for your DevSecOps workflow. Then start [a free trial of GitLab Duo](https://about.gitlab.com/gitlab-duo/#free-trial) to get the incredible benefits in your own organization! \n\n##  7. [Developing GitLab Duo: Use AI to remediate security vulnerabilities](https://about.gitlab.com/blog/developing-gitlab-duo-use-ai-to-remediate-security-vulnerabilities)\nThis tutorial shows how GitLab Duo Vulnerability Explanation and GitLab Duo Vulnerability Resolution, along with our other AI-powered features, can help to address vulnerabilities quickly.",[803,695,951,9],{"slug":1444,"featured":6,"template":698},"developing-gitlab-duo-series","content:en-us:blog:developing-gitlab-duo-series.yml","Developing Gitlab Duo Series","en-us/blog/developing-gitlab-duo-series.yml","en-us/blog/developing-gitlab-duo-series",{"_path":1450,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1451,"content":1457,"config":1463,"_id":1465,"_type":14,"title":1466,"_source":16,"_file":1467,"_stem":1468,"_extension":19},"/en-us/blog/devsecops-agentic-ai-now-on-gitlab-self-managed-ultimate-on-aws",{"title":1452,"description":1453,"ogTitle":1452,"ogDescription":1453,"noIndex":6,"ogImage":1454,"ogUrl":1455,"ogSiteName":685,"ogType":686,"canonicalUrls":1455,"schema":1456},"DevSecOps + Agentic AI: Now on GitLab Self-Managed Ultimate on AWS","Start using AI-powered, DevSecOps-enhanced agents in your AWS GitLab Self-Managed Ultimate instance. 
Enjoy the benefits of GitLab Duo and Amazon Q in your organization.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659604/Blog/Hero%20Images/Screenshot_2024-11-27_at_4.55.28_PM.png","https://about.gitlab.com/blog/devsecops-agentic-ai-now-on-gitlab-self-managed-ultimate-on-aws","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"DevSecOps + Agentic AI: Now on GitLab Self-Managed Ultimate on AWS\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Jackie Porter\"}],\n        \"datePublished\": \"2025-01-16\"\n      }",{"title":1452,"description":1453,"authors":1458,"heroImage":1454,"date":1460,"body":1461,"category":801,"tags":1462},[1459],"Jackie Porter","2025-01-16","We are thrilled to announce the GitLab Duo with Amazon Q offering, previously [shared at AWS 2024 re:Invent](https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-devsecops-meets-agentic-ai/), is now available in Preview (Beta) for GitLab Self-Managed Ultimate users, at no additional cost. This milestone brings AI agentic experiences to organizations that maintain their own GitLab instance.\n\n### What does this mean for GitLab Self-Managed Ultimate customers? \n\nBeginning in our [17.8 release](https://about.gitlab.com/releases/2025/01/16/gitlab-17-8-released/), GitLab Self-Managed Ultimate customers can now take advantage of the GitLab Duo with Amazon Q Preview (Beta) [capabilities](https://docs.gitlab.com/ee/user/duo_amazon_q/). 
There are three key experiences you will be able to access: \n- AI-powered feature development: Use the `/q dev` quick action to transform requirements into merge-ready code.\n- Automated code reviews: Leverage `/q review` for instant, intelligent feedback on code quality and security.\n- Java modernization: Streamline Java application upgrades with `/q transform`.\n\n### Getting started with the Preview (Beta) \n\nTo use these capabilities in your GitLab Self-Managed Ultimate instance:\n\n- Ensure you meet the [prerequisites](https://docs.gitlab.com/ee/user/duo_amazon_q/setup.html#prerequisites), including upgrading to GitLab 17.8, have an Ultimate subscription (no trial access), and have the instance hosted on AWS. \n- Enable your GitLab Duo with Amazon Q integration settings.\n- Configure IAM identity and roles in AWS and the GitLab AI gateway.\n- Add the Amazon Q user to the project. \n\nFor more detailed setup information, see our [documentation](https://docs.gitlab.com/ee/user/duo_amazon_q/setup.html). \n\n### Looking ahead\n\nThis Preview release represents our commitment to bringing enterprise-grade AI capabilities to all GitLab Ultimate customers. We're excited to work closely with our customers during this Preview (Beta) period to ensure GitLab Duo with Amazon Q delivers a superior experience. \nWe encourage GitLab Self-Managed Ultimate customers to begin exploring these capabilities and provide feedback. Your input will be invaluable in shaping the future of AI-powered development in GitLab.\n\n### Get started today \n\nGitLab Self-Managed Ultimate customers can begin enabling and configuring GitLab Duo with Amazon Q as outlined in our [setup documentation](https://docs.gitlab.com/ee/user/duo_amazon_q/setup.html). To learn more about how the Preview (Beta) release can transform your software development, visit our [website](https://about.gitlab.com/partners/technology-partners/aws/#interest). 
 \nStay tuned for regular updates as we continue to enhance and expand the capabilities of GitLab Duo with Amazon Q.\n",[803,9,717,805,1133],{"slug":1464,"featured":6,"template":698},"devsecops-agentic-ai-now-on-gitlab-self-managed-ultimate-on-aws","content:en-us:blog:devsecops-agentic-ai-now-on-gitlab-self-managed-ultimate-on-aws.yml","Devsecops Agentic Ai Now On Gitlab Self Managed Ultimate On Aws","en-us/blog/devsecops-agentic-ai-now-on-gitlab-self-managed-ultimate-on-aws.yml","en-us/blog/devsecops-agentic-ai-now-on-gitlab-self-managed-ultimate-on-aws",{"_path":1470,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1471,"content":1477,"config":1482,"_id":1484,"_type":14,"title":1485,"_source":16,"_file":1486,"_stem":1487,"_extension":19},"/en-us/blog/ditch-your-legacy-planning-tool-get-agile-with-gitlab",{"title":1472,"description":1473,"ogTitle":1472,"ogDescription":1473,"noIndex":6,"ogImage":1474,"ogUrl":1475,"ogSiteName":685,"ogType":686,"canonicalUrls":1475,"schema":1476},"Ditch your legacy planning tool: Get Agile with GitLab","Discover how Agile planning in a DevSecOps platform unlocks traceability from idea to implementation, providing a holistic view of your strategy in action.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099116/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%282%29_6IiTrEHgXVZcOI2rcPfg21_1750099116301.png","https://about.gitlab.com/blog/ditch-your-legacy-planning-tool-get-agile-with-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Ditch your legacy planning tool: Get Agile with GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Amanda Rueda\"}],\n        \"datePublished\": \"2024-03-14\"\n      }",{"title":1472,"description":1473,"authors":1478,"heroImage":1474,"date":1479,"body":1480,"category":1053,"tags":1481},[1050],"2024-03-14","When organizations are trying to get a better handle on 
their software development workflows — struggling with bottlenecks and silos, not being able to integrate customer feedback fast enough, and lacking project visibility — they just might turn to Agile planning. It’s a solid way to increase collaboration, foster iterations, and speed value delivery.\n\nAgile planning, though, works even better when done in conjunction with a DevSecOps platform and the methodologies that come along with it. There’s no need to choose one or the other. In fact, they are designed to work together, and both focus on continuous improvement, collaboration, and team empowerment. A DevSecOps platform helps you bring an Agile mindset to software delivery.\n\n> [Contact our sales team](https://about.gitlab.com/sales/) to get started with GitLab Agile planning today!\n\n### What problems can Agile planning and a DevSecOps platform tackle?\n\nFirst off, let’s look at some challenges teams take on with a combination of Agile planning tools and a DevSecOps platform:\n\n- Inefficiencies and bottlenecks - With context switching between a multitude of tools and a lack of visibility into teams’ work and workflows, it’s hard to see bottlenecks where time and effort is being wasted.\n- Silos and lack of collaboration - If planning and development are happening in different places and with different tools, it’s much more difficult for teams to work together to improve efficiency and deployment speed.\n- Problems scaling up - When teams and workflows are growing, it can be difficult to enable cross-functional work that will keep up with increasing demands.\n- Problems with communication - When stakeholders don’t have insight and the ability to comment in the early planning stage of a project, engaging with them becomes inefficient and their valuable views are lost.\n- Losing track of resources - Without visibility into how teams’ time  and work are allocated, it’s easy to lose sight of how valuable resources are being allocated — or even wasted.\n- Keeping 
up with reports - Without automation to help teams keep up with progress reports, it can be highly taxing to get needed, organized reports on time.\n\nBoth Agile planning tools and a DevSecOps platform focus on bringing customer-facing teams, product teams, and development teams together to accelerate customer feedback into production and speed up development through iterations. Each one works really well, but together they’re even more powerful.\n\n### Learning from Iron Mountain\n\n[Iron Mountain Inc.](https://about.gitlab.com/customers/iron-mountain/), a U.S.-based enterprise information management services company, has embraced Agile methods, but was looking to simplify their fragmented tooling to gain a single view of software development workflows. With an eye on reducing the costs associated with infrastructure management, while also securely increasing production velocity, the company, which has more than 225,000 customers worldwide, adopted a DevSecOps platform to scale their Agile framework. The platform enabled their Agile methodologies, and Agile supported their evolution to DevOps.\n\n“GitLab has provided us with the foundation and platform to enable our scaled Agile framework,” says Hayelom Tadesse, Iron Mountain’s vice president of enterprise technology. “We are able to collaborate within our Enterprise IT teams and our key stakeholders.”\n\n### How Agile planning + a DevSecOps platform helps organizations\n\nAgile is a software development mindset that calls for iterative changes and updates. Instead of creating an initial, one-and-only plan that is seen all the way through the software development lifecycle, Agile planning leaves room to adapt through the development phase, based on feedback from cross-functional teams and customers. 
Simply put, Agile planning comes into play in every phase of development.\n\nAnd an end-to-end DevSecOps platform fosters collaboration, breaks down silos, and offers visibility into what is happening throughout the software development lifecycle.\nWhen you incorporate Agile planning inside the framework of a DevSecOps platform, it’s easier and more efficient to plan, organize, track, and measure work.\n\nBy [combining Agile and a DevSecOps platform](https://about.gitlab.com/blog/gitlab-enterprise-agile-planning-add-on-for-all-roles/), organizations can:\n- **Optimize workflows** by enabling a unified view of DevSecOps metrics to resolve process bottlenecks and achieve business goals faster. A DevSecOps platform, like GitLab, can solve challenges such as fragmented tooling, gaps that block development and operations communications, administrative burdens, and difficulties with efficiently securing complex pipeline deployments.\n- **Unlock collaboration** to break down silos and drive organizational outcomes. Cross-functional teams work side-by-side in GitLab, fostering open communication and real-time feedback. By sharing ideas and iterating on work from the inception of ideas to deployment to production, everyone remains on the same page.\n- **Simplify design work** since design teams can share mockups, gather feedback, and ensure software alignment with business objectives, all within a single platform.\n- **Enable value stream management** by empowering teams with metrics and insights. A platform helps teams ship better software faster and focus on delivering customer value.\n- **Gain actionable insights** to drive continuous improvement across the entire software delivery process. 
By bringing Agile planning into a DevSecOps platform, code merges are effortlessly linked to issues, ensuring complete visibility and a user-friendly view of a project’s current status.\n\nIntegrated Agile capabilities bring teams together, fostering a culture of collaboration, transparency, and efficiency. And, within a single platform, it empowers teams to work together, deliver value faster, and ultimately, create software that truly matters.\n\nTake a look at how GitLab can help you plan and track work across the software development lifecycle, removing inefficiencies and scaling software delivery:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/7ICcIaDsVc8?si=5_rf2KLyoE5kuIAM\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n> [Contact our sales team](https://about.gitlab.com/sales/) to get started with GitLab Agile planning today!\n",[908,951,9],{"slug":1483,"featured":91,"template":698},"ditch-your-legacy-planning-tool-get-agile-with-gitlab","content:en-us:blog:ditch-your-legacy-planning-tool-get-agile-with-gitlab.yml","Ditch Your Legacy Planning Tool Get Agile With Gitlab","en-us/blog/ditch-your-legacy-planning-tool-get-agile-with-gitlab.yml","en-us/blog/ditch-your-legacy-planning-tool-get-agile-with-gitlab",{"_path":1489,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1490,"content":1496,"config":1502,"_id":1504,"_type":14,"title":1505,"_source":16,"_file":1506,"_stem":1507,"_extension":19},"/en-us/blog/dunelm-strengthens-business-by-enhancing-its-devsecops-culture",{"title":1491,"description":1492,"ogTitle":1491,"ogDescription":1492,"noIndex":6,"ogImage":1493,"ogUrl":1494,"ogSiteName":685,"ogType":686,"canonicalUrls":1494,"schema":1495},"Dunelm strengthens business by enhancing its DevSecOps culture","Learn how a major UK retailer is using GitLab to ensure everyone in their DevSecOps teams can work together, increasing 
speed, security, and trust.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749670332/Blog/Hero%20Images/groupcollaboration.jpg","https://about.gitlab.com/blog/dunelm-strengthens-business-by-enhancing-its-devsecops-culture","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Dunelm strengthens business by enhancing its DevSecOps culture\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Sharon Gaudin\"}],\n        \"datePublished\": \"2023-11-16\"\n      }",{"title":1491,"description":1492,"authors":1497,"heroImage":1493,"date":1499,"body":1500,"category":1073,"tags":1501},[1498],"Sharon Gaudin","2023-11-16","When IT leaders at one of the UK’s largest retailers wanted to better support and strengthen the business, they created a culture where everyone on their DevSecOps teams could work together more naturally.\n\n“DevSecOps is a mindset and mindset is a cultural thing,” said Paul Kerrison, director of Engineering and Architecture at [Dunelm](https://about.gitlab.com/customers/dunelm/), a $1.6 billion (GBP) British home furnishings retailer. “Culture isn’t just a buzzword or something nice to have. There's a genuine value. It builds trust with not just the technology teams but with the wider business as a whole. That allows us to move faster and more safely.”\n\nKerrison, along with Jan Claeyssens, Dunelm’s DevSecOps principal engineer, talked about creating this cultural shift during GitLab’s DevSecOps World Tour event in London this fall. The 44-year-old company, which is traded on the London Stock Exchange and employs more than 11,000 people, adopted GitLab in 2020. 
They have been using the end-to-end platform to not only increase software development productivity and efficiency but to build inclusive teams that are focused on sharing responsibility for security.\n\nOn stage with Grigoriy Shlyapinkov, strategic customer success manager at GitLab, Claeyssens told the audience it’s been critical to cut out an us-versus-them mentality that existed between developers and security engineers. Working hard to break down those barriers, he said they’ve created a new [cohesive environment](/blog/its-time-to-put-the-sec-in-devsecops/).\n\nDunelm’s DevOps teams had been working together pretty well before the culture shift, but now they’re [collaborating](/blog/5-ways-collaboration-boosts-productivity-and-your-career/) on a different level.\n\n## Embedding security into the team\n\n“I think we can all agree that in 2023 you can no longer say you have good software if it isn't secure,” said Claeyssens. “We have to lean in and really listen to what problems other people are facing, and then figure out how to help them. We all work at Dunelm for the same thing – to [make the business better](/blog/how-devsecops-drives-business-success/). So we need to solve problems together and that’s going to be a continuous journey. This culture will require daily effort.”\n\nThat effort, though, is eased by using GitLab because everyone – from developers to security and the C-suite – can use the end-to-end platform to gain visibility into every project, Claeyssens noted.\n\n“With the platform we can serve all our users where they live,” he said. “It’s one of GitLab’s mantras. If you have everything in one place, all colleagues can see all the features, and in the end, that puts us on a journey together.” \n\n## Making room at the table\n\nKerrison said by rewarding positive behaviors and giving security team members a seat at what had once been an exclusive table, everyone can pick up the security mantle. 
\n\n“Including security in the leadership team and having them work closely with the engineering teams is key,” he said. “You can’t improve security if it's seen as a second-class citizen.”\n\nAnd part of making it easier for everyone to be aware of and work on security is about using automation features built into the GitLab platform, according to Claeyssens. “The platform is about automating security,” he said. “You have all the security scanners in GitLab – everything. We use the compliance frameworks. The vulnerability scanning. The scan result policies. It makes all our lives a little bit easier.”\n\nThat holistic approach to DevSecOps is what made Dunelm adopt GitLab in the first place. “GitLab sold us on the dream of the whole suite of features, and I'll be honest, I didn't know where GitHub was at the time,” said Claeyssens. “There were only so many contenders. That's how we ended up with GitLab.”\n\n## Looking ahead to working with AI\n\nWhen asked about using artificial intelligence-assisted tools to cut down on DevSecOps workloads, Kerrison said it’s really easy right now to get caught up in all the buzz about AI. Their work with it will likely start, he explained, with using AI for specific tasks, like code generation, vulnerability checks, and refactoring code. Then as it advances, the technology could start to give engineers real-time feedback on code as they’re writing it.\n\nReferencing Star Trek character Captain Jean-Luc Picard asking the starship’s food replicator for “tea, Earl Grey, hot,” Kerrison said he’s looking forward to the day he can order up microservice deliveries and the smart system will create automated testing, along with security and vulnerability scanning. \n\nWhat Kerrison’s really waiting for, though, are AI tools that have actual agency to be predictive, considering what projects and teams need assistance and then launching that work. “It would be much more trusted and can go out to find problems and fix them,” said Kerrison. 
“Then engineers can start to focus more on the what than the how. That’s where the power will be. Although until AI gets to the place where it can sort and pair socks after they've been through the washing, I don't think we're quite there yet.”\n\n_Dunelm, which uses GitLab Ultimate, has distribution centers, 178 stores, and a robust ecommerce operation. The company sees more than 12 million online transactions per year, while maintaining a large online catalog. More than ever, Dunelm relies on innovative technology engineering to improve customer experiences._\n\n_Read more GitLab customer stories on our [customers page](https://about.gitlab.com/customers/)._",[1075,951,9,693],{"slug":1503,"featured":6,"template":698},"dunelm-strengthens-business-by-enhancing-its-devsecops-culture","content:en-us:blog:dunelm-strengthens-business-by-enhancing-its-devsecops-culture.yml","Dunelm Strengthens Business By Enhancing Its Devsecops Culture","en-us/blog/dunelm-strengthens-business-by-enhancing-its-devsecops-culture.yml","en-us/blog/dunelm-strengthens-business-by-enhancing-its-devsecops-culture",{"_path":1509,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1510,"content":1516,"config":1521,"_id":1523,"_type":14,"title":1524,"_source":16,"_file":1525,"_stem":1526,"_extension":19},"/en-us/blog/efficient-devsecops-workflows-hands-on-python-gitlab-api-automation",{"ogTitle":1511,"schema":1512,"ogImage":1513,"ogDescription":1514,"ogSiteName":685,"noIndex":6,"ogType":686,"ogUrl":1515,"title":1511,"canonicalUrls":1515,"description":1514},"Efficient DevSecOps workflows: Hands-on python-gitlab API automation","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Efficient DevSecOps workflows: Hands-on python-gitlab API automation\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Michael Friedrich\"}],\n        \"datePublished\": \"2023-02-01\",\n      
}","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659883/Blog/Hero%20Images/post-cover-image.jpg","The python-gitlab library is a useful abstraction layer for the GitLab API. Dive into hands-on examples and best practices in this tutorial.","https://about.gitlab.com/blog/efficient-devsecops-workflows-hands-on-python-gitlab-api-automation",{"title":1511,"description":1514,"authors":1517,"heroImage":1513,"date":1518,"body":1519,"category":928,"tags":1520},[1379],"2023-02-01","A friend once said in a conference presentation, “Manual work is a bug.\"\nWhen there are repetitive tasks in workflows, I tend to [come back to this\nquote](https://twitter.com/dnsmichi/status/1574087419237916672), and try to\nautomate as much as possible. For example, by querying a REST API to do an\ninventory of settings, or calling API actions to create new comments in\nGitLab issues/merge requests. The interaction with the GitLab REST API can\nbe done in different ways, using HTTP requests with curl (or\n[hurl](/blog/how-to-continously-test-web-apps-apis-with-hurl-and-gitlab-ci-cd/))\non the command line, or by writing a script in a programming language. The\nlatter can become reinventing the wheel again with raw HTTP requests code,\nand parsing the JSON responses.\n\n\nThanks to the wider GitLab community, many different languages are supported by API abstraction libraries. They provide support for all API attributes, add helper functions to get/create/delete objects, and generally aim to help developers focus. The [python-gitlab library](https://python-gitlab.readthedocs.io/en/stable/) is a feature-rich and easy-to-use library written in Python.\n\n\nIn this blog post, you will learn about the basic usage of the library by working with API objects, attributes, pagination and resultsets, and dive into more concrete use cases collecting data, printing summaries and writing data to the API to create comments and commits. 
There is a whole lot more to learn, with many of the use cases inspired by wider community questions on the forum, Hacker News, issues, etc.\n\n\nThis blog post is a long read, so feel free to stick with the beginner's tutorial or skip to the advanced [DevSecOps](https://about.gitlab.com/topics/devsecops/) use cases, development tips and code optimizations by navigating the table of contents:\n\n\n- [Getting started](#getting-started)\n\n- [Configuration](#configuration)\n\n- [Managing objects: The GitLab Object](#managing-objects-the-gitlab-object)\n    - [Objects managers and loading](#objects-managers-and-loading)\n    - [Pagination of results](#pagination-of-results)\n    - [Working with object relationships](#working-with-object-relationships)\n    - [Working with different object collection scopes](#working-with-different-object-collection-scopes)\n- [DevSecOps use cases for API read actions](#devsecops-use-cases-for-api-read-actions)\n    - [List branches by merged state](#list-branches-by-merged-state)\n    - [Print project settings for review: MR approval rules](#print-project-settings-for-review-mr-approval-rules)\n    - [Inventory: Get all CI/CD variables that are protected or masked](#inventory-get-all-cicd-variables-that-are-protected-or-masked)\n    - [Download a file from the repository](#download-a-file-from-the-repository)\n    - [Migration help: List all certificate-based Kubernetes clusters](#migration-help-list-all-certificate-based-kubernetes-clusters)\n    - [Team efficiency: Check if existing merge requests need to be rebased after merging a huge refactoring MR](#team-efficiency-check-if-existing-merge-requests-need-to-be-rebased-after-merging-a-huge-refactoring-mr)\n- [DevSecOps use cases for API write actions](#devsecops-use-cases-for-api-write-actions)\n    - [Move epics between groups](#move-epics-between-groups)\n    - [Compliance: Ensure that project settings are not 
overridden](#compliance-ensure-that-project-settings-are-not-overridden)\n    - [Taking notes, generate due date overview](#taking-notes-generate-due-date-overview)\n    - [Create issue index in a Markdown file, grouped by labels](#create-issue-index-in-a-markdown-file-grouped-by-labels)\n- [Advanced DevSecOps workflows](#advanced-devsecops-workflows)\n    - [Container images to run API scripts](#container-images-to-run-api-scripts)\n    - [CI/CD integration: Release and changelog generation](#cicd-integration-release-and-changelog-generation)\n    - [CI/CD integration: Pipeline report summaries](#cicd-integration-pipeline-report-summaries)\n- [Development tips](#development-tips)\n    - [Advanced custom configuration](#advanced-custom-configuration)\n    - [CI/CD code linting for different Python versions](#cicd-code-linting-for-different-python-versions)\n- [Optimize code and performance](#optimize-code-and-performance)\n    - [Lazy objects](#lazy-objects)\n    - [Object-oriented programming](#object-oriented-programming)\n- [More use cases](#more-use-cases)\n\n- [Conclusion](#conclusion)\n\n\n## Getting started\n\n\nThe python-gitlab documentation is a great resource for [getting started guides](https://python-gitlab.readthedocs.io/en/stable/api-usage.html), object types and their available methods, and combined workflow examples. Together with the [GitLab API resources documentation](https://docs.gitlab.com/ee/api/api_resources.html), which provides the object attributes that can be used, these are the best resources to get going.\n\n\nThe code examples in this blog post require Python 3.8+, and the `python-gitlab` library. Additional requirements are specified in the `requirements.txt` file – one example requires `pyyaml` for YAML config parsing. To follow and practice the use cases code, it is recommended to clone the project, install the requirements and run the scripts. 
Example with Homebrew on macOS:\n\n\n```shell\n\ngit clone https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python.git\n\n\ncd gitlab-api-python\n\n\nbrew install python\n\n\npip3 install -r requirements.txt\n\n\npython3 \u003Cscriptname>.py\n\n```\n\n\nThe scripts are intentionally not using a common shared library that provides generic functions for parameter reads, or additional helper functionality, for example. The idea is to show easy-to-follow examples that can be used stand-alone for testing, and only require installing the `python-gitlab` library as a dependency. Improving the code for production use is recommended. This can also help with building a maintained API tooling project that, for example, includes container images and CI/CD templates for developers to consume on a DevSecOps platform.\n\n\n## Configuration\n\n\nWithout configuration, python-gitlab will run unauthenticated requests against the default server `https://gitlab.com`. The most common configuration settings relate to the GitLab instance to connect to, and the authentication method by specifying access tokens. Python-gitlab supports different types of configuration: A configuration file or environment variables.\n\n\nThe [configuration file](https://python-gitlab.readthedocs.io/en/stable/cli-usage.html#cli-configuration) is available for the API library bindings, and the CLI (the CLI is not explained in this blog post). The configuration file supports [credential helpers](https://python-gitlab.readthedocs.io/en/stable/cli-usage.html#credential-helpers) to access tokens directly.\n\n\nEnvironment variables as an alternative configuration method provide an easy way to run the script on terminal, integrate into container images, and prepare them for running in CI/CD pipelines.\n\n\nThe configuration needs to be loaded into the Python script context. Start by importing the `os` library to fetch environment variables using the `os.environ.get()` method. 
The first parameter specifies the key, the second parameter sets the default value when the variable is not available in the environment.\n\n\n```python\n\nimport os\n\n\ngl_server = os.environ.get('GL_SERVER', 'https://gitlab.com')\n\n\nprint(gl_server)\n\n```\n\n\nThe parametrization on the terminal can happen directly for the command only, or exported into the shell environment.\n\n\n```shell\n\n$ GL_SERVER=’https://gitlab.company.com’ python3 script.py\n\n\n$ export GL_SERVER=’https://gitlab.company.com’\n\n$ python3 script.py\n\n```\n\n\nIt is recommended to add safety checks to ensure that all variables are set before continuing to run the program. The following snippet imports the required libraries, reads the `GL_SERVER` environment variable and expects the user to set the `GL_TOKEN` variable. If not, the script prints and throws errors, and calls `sys.exit(1)` indicating an error status.\n\n\n```python\n\nimport gitlab\n\nimport os\n\nimport sys\n\n\nGITLAB_SERVER = os.environ.get('GL_SERVER', 'https://gitlab.com')\n\nGITLAB_TOKEN = os.environ.get('GL_TOKEN')\n\n\nif not GITLAB_TOKEN:\n    print(\"Please set the GL_TOKEN env variable.\")\n    sys.exit(1)\n```\n\n\nWe will look into a more detailed example now which creates a connection to the API and makes an actual data request.\n\n\n## Managing objects: The GitLab object\n\n\nAny interaction with the API requires the GitLab object to be instantiated. This is the entry point to configure the GitLab server to connect, authenticate using access tokens, and more global settings for pagination, object loading and more.\n\n\nThe following example runs an unauthenticated request against GitLab.com. 
It is possible to access public API endpoints and for example get a specific [.gitignore template for Python](https://python-gitlab.readthedocs.io/en/stable/gl_objects/templates.html#gitignore-templates).\n\n\n[python_gitlab_object_unauthenticated.py](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/python_gitlab_object_unauthenticated.py)\n\n\n```python\n\nimport gitlab\n\n\ngl = gitlab.Gitlab()\n\n\n# Get .gitignore templates without authentication\n\ngitignore_templates = gl.gitignores.get('Python')\n\n\nprint(gitignore_templates.content)\n\n```\n\n\nThe next sections provide more insights into:\n\n\n- [Objects managers and loading](#objects-managers-and-loading)\n\n- [Pagination of results](#pagination-of-results)\n\n- [Working with object relationships](#working-with-object-relationships)\n\n- [Working with different object collection scopes](#working-with-different-object-collection-scopes)\n\n\n### Objects managers and loading\n\n\nThe python-gitlab library provides access to GitLab resources using so-called “[managers](https://python-gitlab.readthedocs.io/en/stable/api-usage.html#managers)\". Each manager type implements methods to work with the datasets (list, get, etc.).\n\n\nThe script shows how to access subgroups, direct projects, all projects including subgroups, issues, epics and todos. These methods and API endpoint require authentication to access all attributes. 
The code snippet, therefore, uses variables to get the authentication token, and also uses the `GROUP_ID` variable to specify a main group at which to start searching.\n\n\n```python\n\n#!/usr/bin/env python\n\n\nimport gitlab\n\nimport os\n\nimport sys\n\n\nGITLAB_SERVER = os.environ.get('GL_SERVER', 'https://gitlab.com')\n\n# https://gitlab.com/gitlab-de/use-cases/\n\nGROUP_ID = os.environ.get('GL_GROUP_ID', 16058698)\n\nGITLAB_TOKEN = os.environ.get('GL_TOKEN')\n\n\nif not GITLAB_TOKEN:\n    print(\"Please set the GL_TOKEN env variable.\")\n    sys.exit(1)\n\ngl = gitlab.Gitlab(GITLAB_SERVER, private_token=GITLAB_TOKEN)\n\n\n# Main\n\nmain_group = gl.groups.get(GROUP_ID)\n\n\nprint(\"Sub groups\")\n\nfor sg in main_group.subgroups.list():\n    print(\"Subgroup name: {sg}\".format(sg=sg.name))\n\nprint(\"Projects (direct)\")\n\nfor p in main_group.projects.list():\n    print(\"Project name: {p}\".format(p=p.name))\n\nprint(\"Projects (including subgroups)\")\n\nfor p in main_group.projects.list(include_subgroups=True, all=True):\n     print(\"Project name: {p}\".format(p=p.name))\n\nprint(\"Issues\")\n\nfor i in main_group.issues.list(state='opened'):\n    print(\"Issue title: {t}\".format(t=i.title))\n\nprint(\"Epics\")\n\nfor e in main_group.issues.list():\n    print(\"Epic title: {t}\".format(t=e.title))\n\nprint(\"Todos\")\n\nfor t in gl.todos.list(state='pending'):\n    print(\"Todo: {t} url: {u}\".format(t=t.body, u=t.target_url\n```\n\n\nYou can run the script [`python_gitlab_object_manager_methods.py`](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/python_gitlab_object_manager_methods.py) by overriding the `GROUP_ID` variable on GitLab.com SaaS for your own group to analyze. The `GL_SERVER` variable needs to be specified for self-managed instance targets. 
`GL_TOKEN` must provide the personal access token.\n\n\n```shell\n\nexport GL_TOKEN=xxx\n\n\nexport GL_SERVER=”https://gitlab.company.com”\n\n\nexport GL_SERVER=”https://gitlab.com”\n\n\nexport GL_GROUP_ID=1234\n\n\npython3 python_gitlab_object_manager_methods.py\n\n```\n\n\nGoing forward, the example snippets won’t show the Python headers and environment variable parsing to focus on the algorithm and functionality. All scripts are open source under the MIT license and available in [this project](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python).\n\n\n### Pagination of results\n\n\nBy default, the GitLab API does not return all result sets and requires the clients to use [pagination](https://docs.gitlab.com/ee/api/rest/index.html#pagination) to iterate through all result pages. The python-gitlab library [allows users to specify the settings](https://python-gitlab.readthedocs.io/en/stable/api-usage.html#pagination) globally in the GitLab object, or on each `list()` call. By default, all result sets would fire API requests, which can slow down the script execution. The recommended way is using `iterator=True` which returns a generator object, and API calls are fired on-demand when accessing the object.\n\n\nThe following example searches for the group name `everyonecancontribute`, and uses keyset pagination with 100 results on each page. The iterator is set to true on `gl.groups.list(iterator=True)` to fetch new result sets on demand. 
If the searched group name is found, the loop breaks and prints a summary, including measuring the duration of the complete search request.\n\n\n```python\n\nSEARCH_GROUP_NAME=\"everyonecancontribute\"\n\n\n# Use keyset pagination\n\n# https://python-gitlab.readthedocs.io/en/stable/api-usage.html#pagination\n\ngl = gitlab.Gitlab(GITLAB_SERVER, private_token=GITLAB_TOKEN,\n    pagination=\"keyset\", order_by=\"id\", per_page=100)\n\n# Iterate over the list, and fire new API calls in case the result set does not match yet\n\ngroups = gl.groups.list(iterator=True)\n\n\nfound_page = 0\n\nstart = timer()\n\n\nfor group in groups:\n    if SEARCH_GROUP_NAME == group.name:\n        # print(group) # debug\n        found_page = groups.current_page\n        break\n\nend = timer()\n\n\nduration = f'{end-start:.2f}'\n\n\nif found_page > 0:\n    print(\"Pagination API example for Python with GitLab{desc} - found group {g} on page {p}, duration {d}s\".format(\n        desc=\", the DevSecOps platform\", g=SEARCH_GROUP_NAME, p=found_page, d=duration))\nelse:\n    print(\"Could not find group name '{g}', duration {d}\".format(g=SEARCH_GROUP_NAME, d=duration))\n```\n\n\nExecuting `python_gitlab_pagination.py` found the [everyonecancontribute group](https://gitlab.com/everyonecancontribute) on page 5.\n\n\n```shell\n\n$ python3 python_gitlab_pagination.py\n\nPagination API example for Python with GitLab, the DevSecOps platform - found group everyonecancontribute on page 5, duration 8.51s\n\n```\n\n\n### Working with object relationships\n\n\nWhen working with object relationships – for example, collecting all projects in a given group – additional steps need to be taken. The returned project objects provide limited attributes by default. Manageable objects require an additional `get()` call which requests the full project object from the API in the background. 
This on-demand workflow helps to avoid waiting times and traffic by reducing the immediately returned attributes.\n\n\nThe following example illustrates the problem by looping through all projects in a group, and tries to call the `project.branches.list()` function, raising an exception in the try/except flow. The second example gets a manageable project object and tries the function call again.\n\n\n```python\n\n# Main\n\ngroup = gl.groups.get(GROUP_ID)\n\n\n# Collect all projects in group and subgroups\n\nprojects = group.projects.list(include_subgroups=True, all=True)\n\n\nfor project in projects:\n    # Try running a method on a weak object\n    try:\n       print(\"🤔 Project: {pn} 💡 Branches: {b}\\n\".format(\n        pn=project.name,\n        b=\", \".join([x.name for x in project.branches.list()])))\n    except Exception as e:\n        print(\"Got exception: {e} \\n ===================================== \\n\".format(e=e))\n\n    # Retrieve a full manageable project object\n    # https://python-gitlab.readthedocs.io/en/stable/gl_objects/groups.html#examples\n    manageable_project = gl.projects.get(project.id)\n\n    # Print a method available on a manageable object\n    print(\"🤔 Project: {pn} 💡 Branches: {b}\\n\".format(\n        pn=manageable_project.name,\n        b=\", \".join([x.name for x in manageable_project.branches.list()])))\n```\n\n\nThe exception handler in the python-gitlab library prints the error message, and also links to the documentation. 
It is helpful to take a debugging note that objects might not be available to manage whenever you cannot access object attributes or function calls.\n\n\n```shell\n\n$ python3 python_gitlab_manageable_objects.py\n\n\n🤔 Project: GitLab API Playground 💡 Branches: cicd-demo-automated-comments, docs-mr-approval-settings, main\n\n\nGot exception: 'GroupProject' object has no attribute 'branches'\n\n\n\u003Cclass 'gitlab.v4.objects.projects.GroupProject'> was created via a\n\nlist() call and only a subset of the data may be present. To ensure\n\nall data is present get the object using a get(object.id) call. For\n\nmore details, see:\n\n\nhttps://python-gitlab.readthedocs.io/en/v3.8.1/faq.html#attribute-error-list\n =====================================\n```\n\n\nThe full script is located [here](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/python_gitlab_manageable_objects.py).\n\n\n### Working with different object collection scopes\n\n\nSometimes, the script needs to collect all projects from a self-managed instance, or from a group with subgroups, or from a single project. The latter is helpful for faster testing on the required attributes, and the group fetch helps with testing at scale later. The following snippet collects all project objects into the `projects` list, and appends objects from different incoming configuration. 
You will also see the manageable object pattern for project in groups again.\n\n\n```python\n    # Collect all projects, or prefer projects from a group id, or a project id\n    projects = []\n\n    # Direct project ID\n    if PROJECT_ID:\n        projects.append(gl.projects.get(PROJECT_ID))\n\n    # Groups and projects inside\n    elif GROUP_ID:\n        group = gl.groups.get(GROUP_ID)\n\n        for project in group.projects.list(include_subgroups=True, all=True):\n            # https://python-gitlab.readthedocs.io/en/stable/gl_objects/groups.html#examples\n            manageable_project = gl.projects.get(project.id)\n            projects.append(manageable_project)\n\n    # All projects on the instance (may take a while to process)\n    else:\n        projects = gl.projects.list(get_all=True)\n```\n\n\nThe full example is located in [this script](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/get_mr_approval_rules.py) for listing MR approval rules settings for specified project targets.\n\n\n## DevSecOps use cases for API read actions\n\n\nThe authenticated access token needs [`read_api` scope](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html#personal-access-token-scopes).\n\n\nThe following use cases are discussed:\n\n\n- [List branches by merged state](#list-branches-by-merged-state)\n\n- [Print project settings for review: MR approval rules](#print-project-settings-for-review-mr-approval-rules)\n\n- [Inventory: Get all CI/CD variables that are protected or masked](#inventory-get-all-cicd-variables-that-are-protected-or-masked)\n\n- [Download a file from the repository](#download-a-file-from-the-repository)\n\n- [Migration help: List all certificate-based Kubernetes clusters](#migration-help-list-all-certificate-based-kubernetes-clusters)\n\n- [Team efficiency: Check if existing merge requests need to be rebased after merging a huge refactoring 
MR](#team-efficiency-check-if-existing-merge-requests-need-to-be-rebased-after-merging-a-huge-refactoring-mr)\n\n\n### List branches by merged state\n\n\nA common ask is to do some Git housekeeping in the project, and see how many merged and unmerged branches are floating around. [A question on the GitLab community forum](https://forum.gitlab.com/t/python-gitlab-project-branch-list-filter/80257) about filtering branch listings inspired me look into writing a [script](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/get_branches_by_state.py) that helps achieve this goal. The `branches.list()` method returns all branch objects that are stored in a temporary list for later processing for two loops: Collecting merged branch names, and not merged branch names. The `merged` attribute on the `branch` object is a boolean value indicating whether the branch has been merged.\n\n\n```python\n\nproject = gl.projects.get(PROJECT_ID, lazy=False, pagination=\"keyset\", order_by=\"updated_at\", per_page=100)\n\n\n# Get all branches\n\nreal_branches = []\n\nfor branch in project.branches.list():\n    real_branches.append(branch)\n\nprint(\"All branches\")\n\nfor rb in real_branches:\n    print(\"Branch: {b}\".format(b=rb.name))\n\n# Get all merged branches\n\nmerged_branches_names = []\n\nfor branch in real_branches:\n    if branch.default:\n        continue # ignore the default branch for merge status\n\n    if branch.merged:\n        merged_branches_names.append(branch.name)\n\nprint(\"Branches merged: {b}\".format(b=\", \".join(merged_branches_names)))\n\n\n# Get un-merged branches\n\nnot_merged_branches_names = []\n\nfor branch in real_branches:\n    if branch.default:\n        continue # ignore the default branch for merge status\n\n    if not branch.merged:\n        not_merged_branches_names.append(branch.name)\n\nprint(\"Branches not merged: {b}\".format(b=\", \".join(not_merged_branches_names)))\n\n```\n\n\nThe workflow is intentionally a 
step-by-step read, you can practice optimizing the Python code for the conditional branch name collection.\n\n\n\n### Print project settings for review: MR approval rules\n\n\nThe following [script](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/get_mr_approval_rules.py) walks through all collected project objects, and checks whether approval rules are specified. If the list length is greater than zero, it loops over the list and prints the settings using a JSON pretty-print method.\n\n\n```python\n    # Loop over projects and print the settings\n    # https://python-gitlab.readthedocs.io/en/stable/gl_objects/merge_request_approvals.html\n    for project in projects:\n        if len(project.approvalrules.list()) > 0:\n            #print(project) #debug\n            print(\"# Project: {name}, ID: {id}\\n\\n\".format(name=project.name_with_namespace, id=project.id))\n            print(\"[MR Approval settings]({url}/-/settings/merge_requests)\\n\\n\".format(url=project.web_url))\n\n            for ar in project.approvalrules.list():\n                print(\"## Approval rule: {name}, ID: {id}\".format(name=ar.name, id=ar.id))\n                print(\"\\n```json\\n\")\n                print(json.dumps(ar.attributes, indent=2)) # TODO: can be more beautiful, but serves its purpose with pretty print JSON\n                print(\"\\n```\\n\")\n\n```\n\n\n### Inventory: Get all CI/CD variables that are protected or masked\n\n\n[CI/CD variables](https://docs.gitlab.com/ee/ci/variables/) are helpful for pipeline parameterization, and can be configured globally on the instance, in groups and in projects. Secrets, passwords and otherwise sensitive information could be stored there, too. 
Sometimes it can be necessary to get an overview of all CI/CD variables that are either protected or masked to get a sense of how many variables need to be updated when rotating tokens for example.\n\n\nThe following [script](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/get_all_cicd_variables_masked_or_protected.py) gets all groups and projects and tries to collect the CI/CD variables from the global instance (requires admin permissions), groups and projects (requires maintainer/owner permissions). It prints all CI/CD variables that are either protected or masked, adding that a potential secret value is stored.\n\n\n```python\n\n#!/usr/bin/env python\n\n\nimport gitlab\n\nimport os\n\nimport sys\n\n\n# Helper function to evaluate secrets and print the variables\n\ndef eval_print_var(var):\n    if var.protected or var.masked:\n        print(\"🛡️🛡️🛡️ Potential secret: Variable '{name}', protected {p}, masked: {m}\".format(name=var.key,p=var.protected,m=var.masked))\n\nGITLAB_SERVER = os.environ.get('GL_SERVER', 'https://gitlab.com')\n\nGITLAB_TOKEN = os.environ.get('GL_TOKEN') # token requires maintainer+ permissions. 
Instance variables require admin access.\n\nPROJECT_ID = os.environ.get('GL_PROJECT_ID') #optional\n\nGROUP_ID = os.environ.get('GL_GROUP_ID', 8034603) # https://gitlab.com/everyonecancontribute\n\n\nif not GITLAB_TOKEN:\n    print(\"🤔 Please set the GL_TOKEN env variable.\")\n    sys.exit(1)\n\ngl = gitlab.Gitlab(GITLAB_SERVER, private_token=GITLAB_TOKEN)\n\n\n# Collect all projects, or prefer projects from a group id, or a project id\n\nprojects = []\n\n# Collect all groups, or prefer group from a group id\n\ngroups = []\n\n\n# Direct project ID\n\nif PROJECT_ID:\n    projects.append(gl.projects.get(PROJECT_ID))\n\n# Groups and projects inside\n\nelif GROUP_ID:\n    group = gl.groups.get(GROUP_ID)\n\n    for project in group.projects.list(include_subgroups=True, all=True):\n        # https://python-gitlab.readthedocs.io/en/stable/gl_objects/groups.html#examples\n        manageable_project = gl.projects.get(project.id)\n        projects.append(manageable_project)\n\n    groups.append(group)\n\n# All projects/groups on the instance (may take a while to process, use iterators to fetch on-demand).\n\nelse:\n    projects = gl.projects.list(iterator=True)\n    groups = gl.groups.list(iterator=True)\n\nprint(\"# List of all CI/CD variables marked as secret (instance, groups, projects)\")\n\n\n# https://python-gitlab.readthedocs.io/en/stable/gl_objects/variables.html\n\n\n# Instance variables (if the token has permissions)\n\nprint(\"Instance variables, if accessible\")\n\ntry:\n    for i_var in gl.variables.list(iterator=True):\n        eval_print_var(i_var)\nexcept:\n    print(\"No permission to fetch global instance variables, continueing without.\")\n    print(\"\\n\")\n\n# group variables (maintainer permissions for groups required)\n\nfor group in groups:\n    print(\"Group {n}, URL: {u}\".format(n=group.full_path, u=group.web_url))\n    for g_var in group.variables.list(iterator=True):\n        eval_print_var(g_var)\n\n    print(\"\\n\")\n\n# Loop over projects 
and print the settings\n\nfor project in projects:\n    # skip archived projects, they throw 403 errors\n    if project.archived:\n        continue\n\n    print(\"Project {n}, URL: {u}\".format(n=project.path_with_namespace, u=project.web_url))\n    for p_var in project.variables.list(iterator=True):\n        eval_print_var(p_var)\n\n    print(\"\\n\")\n```\n\n\nThe script intentionally does not print the variable values; this is left as an exercise for safe environments. The recommended way of storing secrets is to [use external providers](https://docs.gitlab.com/ee/ci/secrets/).\n\n\n### Download a file from the repository\n\n\nThe [script](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/get_raw_file_content.py) goal is to download a file path from a specified branch name, and store its content in a new file.\n\n\n```python\n\n# Goal: Try to download README.md from https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/README.md\n\nFILE_NAME = 'README.md'\n\nBRANCH_NAME = 'main'\n\n\n# Search the file in the repository tree and get the raw blob\n\nfor f in project.repository_tree():\n    print(\"File path '{name}' with id '{id}'\".format(name=f['name'], id=f['id']))\n\n    if f['name'] == FILE_NAME:\n        f_content = project.repository_raw_blob(f['id'])\n        print(f_content)\n\n# Alternative approach: Get the raw file from the main branch\n\nraw_content = project.files.raw(file_path=FILE_NAME, ref=BRANCH_NAME)\n\nprint(raw_content)\n\n\n# Store the file on disk\n\nwith open('raw_README.md', 'wb') as f:\n    project.files.raw(file_path=FILE_NAME, ref=BRANCH_NAME, streamed=True, action=f.write)\n```\n\n\n### Migration help: List all certificate-based Kubernetes clusters\n\n\nThe certificate-based integration of Kubernetes clusters into GitLab [was deprecated](https://docs.gitlab.com/ee/update/deprecations.html#self-managed-certificate-based-integration-with-kubernetes). 
To help with migration plans, the inventory of existing groups and projects can be automated using the GitLab API.\n\n\n\n```python\n\ngroups = [ ]\n\n\n# get GROUP_ID group\n\ngroups.append(gl.groups.get(GROUP_ID))\n\n\nfor group in groups:\n    for sg in group.subgroups.list(include_subgroups=True, all=True):\n        real_group = gl.groups.get(sg.id)\n        groups.append(real_group)\n\ngroup_clusters = {}\n\nproject_clusters = {}\n\n\nfor group in groups:\n    #Collect group clusters\n    g_clusters = group.clusters.list()\n\n    if len(g_clusters) > 0:\n        group_clusters[group.id] = g_clusters\n\n    # Collect all projects in group and subgroups and their clusters\n    projects = group.projects.list(include_subgroups=True, all=True)\n\n    for project in projects:\n        # https://python-gitlab.readthedocs.io/en/stable/gl_objects/groups.html#examples\n        manageable_project = gl.projects.get(project.id)\n\n        # skip archived projects\n        if project.archived:\n            continue\n\n        p_clusters = manageable_project.clusters.list()\n\n        if len(p_clusters) > 0:\n            project_clusters[project.id] = p_clusters\n\n# Print summary\n\nprint(\"## Group clusters\\n\\n\")\n\nfor g_id, g_clusters in group_clusters.items():\n    url = gl.groups.get(g_id).web_url\n    print(\"Group ID {g_id}: {u}\\n\\n\".format(g_id=g_id, u=url))\n    print_clusters(g_clusters)\n\nprint(\"## Project clusters\\n\\n\")\n\nfor p_id, p_clusters in project_clusters.items():\n    url = gl.projects.get(p_id).web_url\n    print(\"Project ID {p_id}: {u}\\n\\n\".format(p_id=p_id, u=url))\n    print_clusters(p_clusters)\n```\n\n\nThe full script is available [here](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/list_cert_based_kubernetes_clusters.py).\n\n\n### Team efficiency: Check if existing merge requests need to be rebased after merging a huge refactoring MR\n\n\nThe [GitLab 
handbook](https://handbook.gitlab.com/handbook/) repository is a large monorepo with many merge requests created, reviewed, approved and merged. Some reviews take longer than others, and some merge requests touch multiple pages when renaming a string, or [all handbook pages](/handbook/about/#count-handbook-pages). The marketing handbook needed restructuring (think of code refactoring), and as such, many directories and paths were moved or renamed. [The issue tasks](https://gitlab.com/gitlab-com/www-gitlab-com/-/issues/13991#tasks) grew over time, and I was worried that other merge requests would run into conflicts after merging the huge changes. I remembered that the python-gitlab can fetch all merge requests in a given project, including details on the Git branch, source paths changed and much more.\n\n\nThe resulting script configures a list of source paths that are touched by all merge requests, and checks against the merge request diff with `mr.diffs.list()` and comparing if a pattern matches against the value in `old_path`. If a match is found, the script logs it, and saves the merge request in the `seen_mr` dictionary for the summary later. There are additional attributes collected to allow printing a Markdown task list with URLs for easier copy-paste into [issue descriptions](https://gitlab.com/gitlab-com/www-gitlab-com/-/issues/13991#additional-tasks). 
The full script is located [here](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/search_mr_contains_updated_path.py).\n\n\n\n```python\n\nPATH_PATTERNS = [\n    'path/to/handbook/source/page.md',\n]\n\n\n# Only list opened MRs\n\n# https://python-gitlab.readthedocs.io/en/stable/gl_objects/merge_requests.html#project-merge-requests\n\nmrs = project.mergerequests.list(state='opened', iterator=True)\n\n\nseen_mr = {}\n\n\nfor mr in mrs:\n    # https://docs.gitlab.com/ee/api/merge_requests.html#list-merge-request-diffs\n    real_mr = project.mergerequests.get(mr.get_id())\n    real_mr_id = real_mr.attributes['iid']\n    real_mr_url = real_mr.attributes['web_url']\n\n    for diff in real_mr.diffs.list(iterator=True):\n        real_diff = real_mr.diffs.get(diff.id)\n\n        for d in real_diff.attributes['diffs']:\n            for p in PATH_PATTERNS:\n                if p in d['old_path']:\n                    print(\"MATCH: {p} in MR {mr_id}, status '{s}', title '{t}' - URL: {mr_url}\".format(\n                        p=p,\n                        mr_id=real_mr_id,\n                        s=real_mr.attributes['detailed_merge_status'],\n                        t=real_mr.attributes['title'],\n                        mr_url=real_mr_url))\n\n                    if not real_mr_id in seen_mr:\n                        seen_mr[real_mr_id] = real_mr\n\nprint(\"\\n# MRs to update\\n\")\n\n\nfor id, real_mr in seen_mr.items():\n    print(\"- [ ] !{mr_id} - {mr_url}+ Status: {s}, Title: {t}\".format(\n        mr_id=id,\n        mr_url=real_mr.attributes['web_url'],\n        s=real_mr.attributes['detailed_merge_status'],\n        t=real_mr.attributes['title']))\n```\n\n\n\n## DevSecOps use cases for API write actions\n\n\nThe authenticated access token needs full [`api` scope](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html#personal-access-token-scopes).\n\n\nThe following use cases are discussed:\n\n\n- [Move epics between 
groups](#move-epics-between-groups)\n\n- [Compliance: Ensure that project settings are not overridden](#compliance-ensure-that-project-settings-are-not-overridden)\n\n- [Taking notes, generate due date overview](#taking-notes-generate-due-date-overview)\n\n- [Create issue index in a Markdown file, grouped by labels](#create-issue-index-in-a-markdown-file-grouped-by-labels)\n\n\n### Move epics between groups\n\n\nSometimes it is necessary to move epics, similar to issues, into a different group. A question in the GitLab marketing Slack channel inspired me to look into a [feature proposal for the UI](https://gitlab.com/gitlab-org/gitlab/-/issues/12689), [quick actions](/blog/improve-your-gitlab-productivity-with-these-10-tips/), and later, thinking about writing an API script to automate the steps. The idea is simple: Move an epic from a source group to a target group, and copy its title, description and labels. Since epics allow to group issues, they need to be reassigned to the target epic, too. Parent-child epic relationships need to be taken into account too: All child epics of the source epics need to be reassigned to the target epic.\n\n\nThe following script looks up all source [epic attributes](https://python-gitlab.readthedocs.io/en/stable/gl_objects/epics.html) first, and then creates a new target epic with minimal attributes: title and description. The labels list is copied and the changes are persisted with the `save()` call. The issues assigned to the epic need to be re-created in the target epic. The `create()` call actually creates the relationship item, not a new issue object itself. The child epics move requires a different approach, since the relationship is vice versa: The `parent_id` on the child epic needs to be compared against the source epic ID, and if matching, updated to the target epic ID. 
After copying everything successfully, the source epic needs to be changed into the `closed` state.\n\n\n\n```python\n\n#!/usr/bin/env python\n\n\n# Description: Show how epics can be moved between groups, including title, description, labels, child epics and issues.\n\n# Requirements: python-gitlab Python libraries. GitLab API write access, and maintainer access to all configured groups/projects.\n\n# Author: Michael Friedrich \u003Cmfriedrich@gitlab.com>\n\n# License: MIT, (c) 2023-present GitLab B.V.\n\n\nimport gitlab\n\nimport os\n\nimport sys\n\n\nGITLAB_SERVER = os.environ.get('GL_SERVER', 'https://gitlab.com')\n\n# https://gitlab.com/gitlab-de/use-cases/gitlab-api\n\nSOURCE_GROUP_ID = os.environ.get('GL_SOURCE_GROUP_ID', 62378643)\n\n# https://gitlab.com/gitlab-de/use-cases/gitlab-api/epic-move-target\n\nTARGET_GROUP_ID = os.environ.get('GL_TARGET_GROUP_ID', 62742177)\n\n# https://gitlab.com/groups/gitlab-de/use-cases/gitlab-api/-/epics/1\n\nEPIC_ID = os.environ.get('GL_EPIC_ID', 1)\n\nGITLAB_TOKEN = os.environ.get('GL_TOKEN')\n\n\nif not GITLAB_TOKEN:\n    print(\"Please set the GL_TOKEN env variable.\")\n    sys.exit(1)\n\ngl = gitlab.Gitlab(GITLAB_SERVER, private_token=GITLAB_TOKEN)\n\n\n# Main\n\n# Goal: Move epic to target group, including title, body, labels, and child epics and issues.\n\nsource_group = gl.groups.get(SOURCE_GROUP_ID)\n\ntarget_group = gl.groups.get(TARGET_GROUP_ID)\n\n\n# Create a new target epic and copy all its items, then close the source epic.\n\nsource_epic = source_group.epics.get(EPIC_ID)\n\n# print(source_epic) #debug\n\n\nepic_title = source_epic.title\n\nepic_description = source_epic.description\n\nepic_labels = source_epic.labels\n\nepic_issues = source_epic.issues.list()\n\n\n# Create the epic with minimal attributes\n\ntarget_epic = target_group.epics.create({\n    'title': epic_title,\n    'description': epic_description,\n})\n\n\n# Assign the list\n\ntarget_epic.labels = epic_labels\n\n\n# Persist the changes in the 
new epic\n\ntarget_epic.save()\n\n\n# Epic issues need to be re-assigned in a loop\n\nfor epic_issue in epic_issues:\n    ei = target_epic.issues.create({'issue_id': epic_issue.id})\n\n# Child epics need to update their parent_id to the new epic\n\n# Need to search in all epics, use lazy object loading\n\nfor sge in source_group.epics.list(lazy=True):\n    # this epic has the source epic as parent epic?\n    if sge.parent_id == source_epic.id:\n        # Update the parent id\n        sge.parent_id = target_epic.id\n        sge.save()\n\nprint(\"Copied source epic {source_id} ({source_url}) to target epic {target_id} ({target_url})\".format(\n    source_id=source_epic.id, source_url=source_epic.web_url,\n    target_id=target_epic.id, target_url=target_epic.web_url))\n\n# Close the old epic\n\nsource_epic.state_event = 'close'\n\nsource_epic.save()\n\nprint(\"Closed source epic {source_id} ({source_url})\".format(\n    source_id=source_epic.id, source_url=source_epic.web_url))\n\n```\n\n\n\n```shell\n\n$  python3 move_epic_between_groups.py\n\nCopied source epic 725341 (https://gitlab.com/groups/gitlab-de/use-cases/gitlab-api/-/epics/1) to target epic 725358 (https://gitlab.com/groups/gitlab-de/use-cases/gitlab-api/epic-move-target/-/epics/6)\n\nClosed source epic 725341 (https://gitlab.com/groups/gitlab-de/use-cases/gitlab-api/-/epics/1)\n\n```\n\n\n\nThe [target epic](https://gitlab.com/groups/gitlab-de/use-cases/gitlab-api/epic-move-target/-/epics/5) was created and shows the expected result: Same title, description, labels, child epic, and issues.\n\n\n![Target epic which has all attributes copied from the source epic: title, description, labels, child epics, issues](/images/blogimages/efficient-devsecops-workflows-python-gitlab-handson/python_gitlab_moved_epic_with_all_attributes.png){: .shadow}\n\n\n**Exercise**: The script does not copy [comments](https://python-gitlab.readthedocs.io/en/stable/gl_objects/notes.html) and [discussion 
threads](https://python-gitlab.readthedocs.io/en/stable/gl_objects/discussions.html) yet. Research and help update the script – merge requests welcome!\n\n\n\n### Compliance: Ensure that project settings are not overridden\n\n\nProject and group settings may be accidentally changed by team members with maintainer permissions. Compliance requirements need to be met. Another use case is to manage configuration with Infrastructure as Code tools, and ensure that GitLab instance/group/project/etc. configuration is persisted and always the same. Tools like Ansible or Terraform can invoke an API script, or use the python-gitlab library to perform tasks to manage settings.\n\n\nThe following example only has the `main` branch protected.\n\n\n![GitLab project settings for repositories and protected branches, main branch](/images/blogimages/efficient-devsecops-workflows-python-gitlab-handson/python_gitlab_protected_branches_settings_main.png){: .shadow}\n\n\nLet us assume that a new `production` branch has been added and should be protected, too. 
The following [script](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/enforce_protected_branches.py) defines the dictionary of protected branches and their access levels for push/merge permissions to maintainer level, and builds the comparison logic around the [python-gitlab protected branches documentation](https://python-gitlab.readthedocs.io/en/stable/gl_objects/protected_branches.html).\n\n\n\n```python\n\n#!/usr/bin/env python\n\n\nimport gitlab\n\nimport os\n\nimport sys\n\n\nGITLAB_SERVER = os.environ.get('GL_SERVER', 'https://gitlab.com')\n\n# https://gitlab.com/gitlab-de/use-cases/\n\nGROUP_ID = os.environ.get('GL_GROUP_ID', 16058698)\n\nGITLAB_TOKEN = os.environ.get('GL_TOKEN')\n\n\nPROTECTED_BRANCHES = {\n    'main': {\n        'merge_access_level': gitlab.const.AccessLevel.MAINTAINER,\n        'push_access_level': gitlab.const.AccessLevel.MAINTAINER\n    },\n    'production': {\n        'merge_access_level': gitlab.const.AccessLevel.MAINTAINER,\n        'push_access_level': gitlab.const.AccessLevel.MAINTAINER\n    },\n}\n\n\nif not GITLAB_TOKEN:\n    print(\"Please set the GL_TOKEN env variable.\")\n    sys.exit(1)\n\ngl = gitlab.Gitlab(GITLAB_SERVER, private_token=GITLAB_TOKEN)\n\n\n# Main\n\ngroup = gl.groups.get(GROUP_ID)\n\n\n# Collect all projects in group and subgroups\n\nprojects = group.projects.list(include_subgroups=True, all=True)\n\n\nfor project in projects:\n    # Retrieve a full manageable project object\n    # https://python-gitlab.readthedocs.io/en/stable/gl_objects/groups.html#examples\n    manageable_project = gl.projects.get(project.id)\n\n    # https://python-gitlab.readthedocs.io/en/stable/gl_objects/protected_branches.html\n    protected_branch_names = []\n\n    for pb in manageable_project.protectedbranches.list():\n        manageable_protected_branch = manageable_project.protectedbranches.get(pb.name)\n        print(\"Protected branch name: {n}, merge_access_level: {mal}, push_access_level: 
{pal}\".format(\n            n=manageable_protected_branch.name,\n            mal=manageable_protected_branch.merge_access_levels,\n            pal=manageable_protected_branch.push_access_levels\n        ))\n\n        protected_branch_names.append(manageable_protected_branch.name)\n\n    for branch_to_protect, levels in PROTECTED_BRANCHES.items():\n        # Fix missing protected branches\n        if branch_to_protect not in protected_branch_names:\n            print(\"Adding branch {n} to protected branches settings\".format(n=branch_to_protect))\n            p_branch = manageable_project.protectedbranches.create({\n                'name': branch_to_protect,\n                'merge_access_level': gitlab.const.AccessLevel.MAINTAINER,\n                'push_access_level': gitlab.const.AccessLevel.MAINTAINER\n            })\n```\n\n\nRunning the script prints the existing `main` branch, and a note that `production` will be updated. The screenshot from the repository settings proves this action.\n\n\n```\n\n$ python3 enforce_protected_branches.py                                                ─╯\n\nProtected branch name: main, merge_access_level: [{'id': 67294702, 'access_level': 40, 'access_level_description': 'Maintainers', 'user_id': None, 'group_id': None}], push_access_level: [{'id': 68546039, 'access_level': 40, 'access_level_description': 'Maintainers', 'user_id': None, 'group_id': None}]\n\nAdding branch production to protected branches settings\n\n```\n\n\n![GitLab project settings for repositories and protected branches, main and production branch](/images/blogimages/efficient-devsecops-workflows-python-gitlab-handson/python_gitlab_protected_branches_settings_main_production.png){: .shadow}\n\n\n\n### Taking notes, generate due date overview\n\n\nA [Hacker News discussion about note-taking tools](https://news.ycombinator.com/item?id=32155848) inspired me to take a look into creating a Markdown table overview, fetched from files that take notes, and sorted by 
the parsed due date. The script is located [here](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/generate_snippets_index_by_due_date.py) and more complex to understand.\n\n\n```\n\n# 2022-07-19 Notes\n\n\nHN topic about taking notes: https://news.ycombinator.com/item?id=32152935\n\n\n\u003C!--\n\n---\n\nTags: DevOps, Learn\n\nDue: 2022-08-01\n\n---\n\n-->\n\n\n```\n\n\n### Create issue index in a Markdown file, grouped by labels\n\n\nA similar Hacker News question inspired me to write a [script](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/generate_issue_index_grouped_by_label.py) that parses all issues in a GitLab project by labels, and creates or updates a Markdown index file in the same repository. The issues are grouped by label.\n\n\nFirst, the issues are fetched from the project, including all labels, and stored in the `index` dictionary.\n\n\n```python\n\np = gl.projects.get(PROJECT_ID)\n\n\nlabels = p.labels.list()\n\n\nindex={}\n\n\nfor i in p.issues.list():\n    for l in i.labels:\n        if l not in index:\n            index[l] = []\n\n        index[l].append(\"#{id} - {title}\".format(id=i.id, title=i.title))\n```\n\n\nThe second step is to create a Markdown formatted listing based on the collected index data, with the label name as key, holding a list of issue strings.\n\n\n```python\n\nindex_str = \"\"\"# Issue Overview\n\n_Grouped by issue labels._\n\n\"\"\"\n\n\nfor l_name, i_list in index.items():\n    index_str += \"\\n## {label} \\n\\n\".format(label=l_name)\n\n    for i in i_list:\n        index_str += \"- {title}\\n\".format(title=i)\n```\n\n\nThe last step is to create a new file in the repository, or update an existing one. This is a little tricky because the API expects you to define the action and will throw an error if you try to update a nonexistent file. The first condition checks whether the file path exists in the repository, and then defines the `action` attribute. 
The `data` dictionary gets built, with the final `commits.create()` method called.\n\n\n```python\n\n# Dump index_str to FILE_NAME\n\n# Create as new commit\n\n# See https://docs.gitlab.com/ce/api/commits.html#create-a-commit-with-multiple-files-and-actions\n\n# for actions detail\n\n\n# Check if file exists, and define commit action\n\nf = p.files.get(file_path=FILE_NAME, ref=REF_NAME)\n\nif not f:\n    action='create'\nelse:\n    action='update'\n\ndata = {\n    'branch': REF_NAME,\n    'commit_message': 'Generate new index, {d}'.format(d=date.today()),\n    'actions': [\n        {\n            'action': action,\n            'file_path': FILE_NAME,\n            'content': index_str\n        }\n    ]\n}\n\n\ncommit = p.commits.create(data)\n\n```\n\n\n## Advanced DevSecOps workflows\n\n\n- [Container images to run API scripts](#container-images-to-run-api-scripts)\n\n- [CI/CD integration: Release and changelog generation](#cicd-integration-release-and-changelog-generation)\n\n- [CI/CD integration: Pipeline report summaries](#cicd-integration-pipeline-report-summaries)\n\n\n### Container images to run API scripts\n\n\nInstalling the Python interpreter and dependent libraries into the operating system may not always work, or it may be a barrier to using the API scripts. A container image that can be pulled from the GitLab registry is a good first step towards more DevSecOps automation and future CI/CD integrations, and provides a tested environment. The python-gitlab project [provides container images](https://python-gitlab.readthedocs.io/en/stable/index.html#using-the-docker-images) which can be used for testing.\n\n\nThe cloned script repository can be mounted into the container, and the settings are configured using environment variables. 
Example with Docker CLI:\n\n\n```shell\n\n$ docker run -ti -v \"`pwd`:/app\" \\\n  -e \"GL_SERVER=http://gitlab.com\" \\\n  -e \"GL_TOKEN=$GITLAB_TOKEN\" \\\n  -e \"GL_GROUP_ID=16058698\" \\\nregistry.gitlab.com/python-gitlab/python-gitlab:slim-bullseye \\\n\npython /app/python_gitlab_manageable_objects.py\n\n```\n\n\n### CI/CD integration: Release and changelog generation\n\n\nCreating a Git tag and a release in GitLab often requires a changelog attached. This provides a summary into all Git commits, all merged merge requests, or something similar that is easier to consume for everyone interested in the changes in this new release. Automating the changelog generation in CI/CD pipelines is possible using the GitLab API. The simplest list uses the Git commit history shown in the [`create_simple_changelog_from_git_history.py`](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/create_simple_changelog_from_git_history.py) script below:\n\n\n\n```python\n\nproject = gl.projects.get(PROJECT_ID)\n\ncommits = project.commits.list(ref_name='main', lazy=True, iterator=True)\n\n\nprint(\"# Changelog\")\n\n\nfor commit in commits:\n    # Generate a markdown formatted list with URLs\n    print(\"- [{text}]({url}) ({name})\".format(text=commit.title, url=commit.web_url, name=commit.author_name))\n```\n\n\nExecuting the script on the [o11y.love project](https://gitlab.com/everyonecancontribute/observability/o11y.love) will print a Markdown list with URLs.\n\n\n```shell\n\n$ python3 create_changelog_from_git_history.py\n\n# Changelog\n\n- [Merge branch 'topics-ebpf-opentelemetry' into 'main'](https://gitlab.com/everyonecancontribute/observability/o11y.love/-/commit/75df97e13e0f429803dc451aac7fee080a51f44c) (Michael Friedrich)\n\n- [Move eBPF/OpenTelemetry into dedicated topics pages ](https://gitlab.com/everyonecancontribute/observability/o11y.love/-/commit/8fa4233630ff8c1d65aff589bd31c4c2f5df36cb) (Michael Friedrich)\n\n- [Merge branch 
'workshop-add-k8s-o11y-toc' into 'main'](https://gitlab.com/everyonecancontribute/observability/o11y.love/-/commit/8b7949b19af6aa6bf25f73ca1ffe8616a7dbaa00) (Michael Friedrich)\n\n- [Add TOC for Kubesimplify Kubernetes Observability workshop ](https://gitlab.com/everyonecancontribute/observability/o11y.love/-/commit/63c8ad587f43e3926e6749a62c33ad0b6f229f47) (Michael Friedrich)\n\n\n...\n\n```\n\n\n**Exercise**: The script is not production ready yet but should get you going to group by commits by Git tag/release, filter merge commits, attach the changelog file or content into the [GitLab release details](https://docs.gitlab.com/ee/api/releases/), etc.\n\n\n### CI/CD integration: Pipeline report summaries\n\n\nWhen developing new API script in Python, a CI/CD integration with automated runs can be desired, too. My recommendation is to focus on writing and testing the script stand-alone on the command line first, and once it works reliably, adapt the code to run the script to perform actions in CI/CD, too. After writing a few scripts, and practicing a lot, you will have learned to write code that can be executed on the CLI, in containers and in CI/CD jobs.\n\n\nA good preparation for CI/CD is to focus on environment variables to configure the script. The environment variables can be defined as CI/CD variables, and there is no extra work with additional configuration files, or command line parameters involved. This keeps the CI/CD configuration footprint small and reusable, too.\n\n\nAn example integration to automatically create security summaries as markdown comment in a merge request was described in the [\"Fantastic Infrastructure-as-Code security attacks and how to find them\" blog post](/blog/fantastic-infrastructure-as-code-security-attacks-and-how-to-find-them/#integrations-into-cicd-and-merge-requests-for-review). This use case required research and testing before actually writing the full API script:\n\n\n1. 
Read the python-gitlab documentation to learn how [merge request comments (notes)](https://python-gitlab.readthedocs.io/en/stable/gl_objects/notes.html#project-notes) can be created.\n\n2. Create a test project and a test merge request for testing.\n\n3. Start writing code which instantiates the GitLab connection object, fetches the project object, and gets the merge request object from a pre-defined ID.\n\n4. Run `mr.notes.create({'body': 'This is a test by dnsmichi'})`\n\n5. Iterate on the body content and pre-fill a string with a markdown table.\n\n6. Fetch pre-defined CI/CD variables to get the `CI_MERGE_REQUEST_ID` value which will be required to update as target.\n\n7. Verify the API permissions and learn that the CI job token is not sufficient.\n\n8. Implement the full algorithm, integrated CI/CD testing and add documentation.\n\n\nThe script runs continuously after security scans have been completed with a report. Another use case can be using [Pipeline schedules](https://docs.gitlab.com/ee/ci/pipelines/schedules.html) which provide synchronization capabilities, and the comments get posted to an issue summary.\n\n\n## Development tips\n\n\nCode and abstraction libraries are helpful but sometimes it can be hard to see the problem why an attribute or object does not provide the expected behavior. It is helpful to take a step back, and look into different ways to fetch data from the REST API, for example [using jq and curl](/blog/devops-workflows-json-format-jq-ci-cd-lint/). The [GitLab CLI](/blog/introducing-the-gitlab-cli/) can also be used to query the API and get immediate results.\n\n\nDeveloping scripts that interact with APIs can become a repetitive task, adding more needed attributes, and the need to learn about object relations, methods and how to store the retrieved data. 
Especially for larger datasets, it can be a good idea to use the JSON library to dump data structures into a file cache on disk, and provide a debug configuration option to read the data from that file, instead of firing the API requests again all the time. This also helps to mitigate potential rate limiting.\n\n\nAdding timing points to the code can help measure the performance, and efficiency of the algorithm used. The following snippet [measures the duration](https://stackoverflow.com/questions/7370801/how-do-i-measure-elapsed-time-in-python ) of requests to retrieve the merge request status. It is part of a script that was used to analyze a potential problem with the `detailed_merge_status` attribute in [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/386661#note_1237757295).\n\n\n```\n\nmrs = project.mergerequests.list(state='opened', iterator=True, with_merge_status_recheck=True)\n\n\nfor mr in mrs:\n    start = timer()\n    #print(mr.attributes) #debug\n    # https://docs.gitlab.com/ee/api/merge_requests.html#list-merge-request-diffs\n    real_mr = project.mergerequests.get(mr.get_id())\n\n    print(\"- [ ] !{mr_id} - {mr_url}+ Status: {s}, Title: {t}\".format(\n        mr_id=real_mr.attributes['iid'],\n        mr_url=real_mr.attributes['web_url'],\n        s=real_mr.attributes['detailed_merge_status'],\n        t=real_mr.attributes['title']))\n\n    end = timer()\n    duration = end - start\n    if duration > 1.0:\n        print(\"ALERT: > 1s \")\n    print(\"> Execution time took {s}s\".format(s=(duration)))\n```\n\n\nMore tips are discussed in the following sections:\n\n\n- [Advanced custom configuration](#advanced-custom-configuration)\n\n- [CI/CD code linting for different Python versions](#cicd-code-linting-for-different-python-versions)\n\n\n### Advanced custom configuration\n\n\nWhen you are developing a script that requires advanced custom configuration, choose a format that fits best into existing infrastructure and development 
guidelines. Python provides libraries for parsing YAML, JSON, etc. The following example configuration file and script showcase a YAML configuration option. It is based on [a script that automatically updates a list of issues/epics](https://gitlab.com/gitlab-de/gitlab-api-automated-commenter) with a comment, reminding responsible team members for a recurring update for a cross-functional initiative at GitLab.\n\n\n[python_gitlab_custom_yaml_config.yml](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/python_gitlab_custom_yaml_config.yml)\n\n```yaml\n\ntasks:\n  - name: \"Backend\"\n    url: \"https://gitlab.com/group1/project2/-/issues/1\"\n  - name: \"Frontend\"\n    url: \"https://gitlab.com/group2/project4/-/issues/2\"\n```\n\n\n[python_gitlab_custom_script_config_yaml.py](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/python_gitlab_custom_script_config_yaml.py)\n\n```python\n\nimport os\n\nimport yaml\n\n\nCONFIG_FILE = os.environ.get('GL_CONFIG_FILE', \"python_gitlab_custom_yaml_config.yml\")\n\n\n# Read config\n\nwith open(CONFIG_FILE, mode=\"rt\", encoding=\"utf-8\") as file:\n    config = yaml.safe_load(file)\n    #print(config) #debug\n\ntasks = []\n\nif \"tasks\" in config:\n    tasks = config['tasks']\n\n# Process the tasks\n\nfor task in tasks:\n    print(\"Task name: '{n}' Issue URL to update: {id}\".format(n=task['name'], id=task['url']))\n    # print(task) #debug\n```\n\n\n```shell\n\n$ python3 python_gitlab_custom_script_config_yaml.py                                     ─╯\n\nTask name: 'Backend' Issue URL to update: https://gitlab.com/group1/project2/-/issues/1\n\nTask name: 'Frontend' Issue URL to update: https://gitlab.com/group2/project4/-/issues/2\n\n```\n\n\n\n### CI/CD code linting for different Python versions\n\n\nAll code examples in this blog post have been tested with Python 3.8, 3.9, 3.10 and 3.11, using [parallel matrix builds in GitLab 
CI/CD](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/.gitlab-ci.yml) and pyflakes for code linting. Automating the tests helps focus on development, and ensuring that the target platforms support the language features. Some Linux distributions do not provide Python 3.11 yet for example, and Python language features cannot be used or may need an alternative implementation.\n\n\n```yaml\n\ninclude:\n  - template: Security/SAST.gitlab-ci.yml\n  - template: Dependency-Scanning.gitlab-ci.yml\n  - template: Secret-Detection.gitlab-ci.yml\n\nstages:\n  - lint\n  - test\n\n.python-req:\n  image: python:$VERSION\n  script:\n    - pip install -r requirements_dev.txt\n  parallel:\n    matrix:\n      - VERSION: ['3.8', '3.9', '3.10', '3.11']   # https://hub.docker.com/_/python\n\nlint-python:\n  extends: .python-req\n  stage: lint\n  script:\n    - !reference [.python-req, script]\n    - pyflakes .\n\nsast:\n  stage: test\n\n```\n\n\n## Optimize code and performance\n\n\n- [Lazy objects](#lazy-objects)\n\n- [Object-oriented programming](#object-oriented-programming)\n\n\n### Lazy objects\n\n\nWhen working with objects that do not immediately need all attributes loaded, you can specify the [`lazy=True`](https://python-gitlab.readthedocs.io/en/stable/api-usage.html#lazy-objects) attribute to not invoke an API call immediately. A follow-up method call will then invoke the required API calls.\n\n\n\n```python\n\n# Lazy object, no API call\n\nproject = gl.projects.get(PROJECT_ID, lazy=True)\n\n\ntry:\n    print(\"Trying to access 'snippets_enabled' on a lazy loaded project object. 
This will throw an exception that we capture.\")\n    print(\"Project settings: snippets_enabled={b}\".format(b=project.snippets_enabled))\nexcept Exception as e:\n    print(\"Accessing lazy loaded object failed: {e}\".format(e=e))\n\nproject.snippets_enabled = True\n\n\nproject.save() # This creates an API call\n\n\nprint(\"\\nLazy object was loaded after save() call.\")\n\nprint(\"Project settings: snippets_enabled={b}\".format(b=project.snippets_enabled))\n\n\n```\n\n\nExecuting the [`python_gitlab_lazy_objects.py`](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/python_gitlab_lazy_objects.py) script shows that the lazy object did not fire an API call, thus throwing an exception when accessing the project setting `snippets_enabled`. To show that the object still can be managed, the code catches the exception to proceed with updating the setting locally, and calling `project.save()` to persist the change and call the API update.\n\n\n```shell\n\n$ python3 python_gitlab_lazy_objects.py                                                ─╯\n\nTrying to access 'snippets_enabled' on a lazy loaded project object. This will throw an exception that we capture.\n\nAccessing lazy loaded object failed: 'Project' object has no attribute 'snippets_enabled'\n\n\nIf you tried to access object attributes returned from the server,\n\nnote that \u003Cclass 'gitlab.v4.objects.projects.Project'> was created as\n\na `lazy` object and was not initialized with any data.\n\n\nLazy object was loaded after save() call.\n\nProject settings: snippets_enabled=True\n\n```\n\n\n### Object-oriented programming\n\n\nFor better code quality, it makes sense to follow object-oriented programming and create classes that store attributes, provide methods, and enable better unit testing. The [storage analyzer tool](https://gitlab.com/gitlab-de/gitlab-storage-analyzer) was developed to create a summary of projects that consume lots storage, for example CI/CD job artifacts. 
By inspecting the [Git history](https://gitlab.com/gitlab-de/gitlab-storage-analyzer/-/commits/main), you can learn from the different iterations to a first working version.\n\n\nThe following example is a trimmed version which shows how to initialize the class `GitLabUseCase`, add helper functions for logging and JSON pretty-printing, and print all project attributes.\n\n\n```python\n\n#!/usr/bin/env python\n\n\nimport gitlab\n\nimport os\n\nimport sys\n\nimport json\n\n\n# Print an error message with prefix, and exit immediately with an error code.\n\ndef error(text):\n    logger(\"ERROR\", text)\n    sys.exit(1)\n\n# Log a line with a given prefix (e.g. INFO)\n\ndef logger(prefix, text):\n    print(\"{prefix}: {text}\".format(prefix=prefix, text=text))\n\n# Return a pretty-printed JSON string with indent of 4 spaces\n\ndef render_json_output(data):\n    return json.dumps(data, indent=4, sort_keys=True)\n\n# Class definition\n\nclass GitLabUseCase(object):\n    # Initializer to set all required parameters\n    def __init__(self, verbose, gl_server, gl_token, gl_project_id):\n        self.verbose = verbose\n        self.gl_server = gl_server\n        self.gl_token = gl_token\n        self.gl_project_id = gl_project_id\n\n    # Debug logger, controlled via verbose parameter\n    def log_debug(self, text):\n        if self.verbose:\n            print(\"DEBUG: {d}\".format(d=text))\n\n    # Connect to the GitLab server and store the connection handle\n    def connect(self):\n        self.log_debug(\"Connecting to GitLab API at {s}\".format(s=self.gl_server))\n        # Supports personal/project/group access token\n        # https://docs.gitlab.com/ee/api/index.html#personalprojectgroup-access-tokens\n        self.gl = gitlab.Gitlab(self.gl_server, private_token=self.gl_token)\n\n    # Use the stored connection handle to fetch a project object by id,\n    # and print its attribute with JSON pretty-print.\n    def print_project_attributes(self):\n        project = 
self.gl.projects.get(self.gl_project_id)\n        print(render_json_output(project.attributes))\n\n\n## main\n\nif __name__ == '__main__':\n    # Fetch configuration from environment variables.\n    # The second parameter specifies the default value when not provided.\n    gl_verbose = os.environ.get('GL_VERBOSE', False)\n    gl_server = os.environ.get('GL_SERVER', 'https://gitlab.com')\n\n    gl_token = os.environ.get('GL_TOKEN')\n\n    if not gl_token:\n        error(\"Please specifiy the GL_TOKEN env variable\")\n\n    gl_project_id = os.environ.get('GL_PROJECT_ID', 42491852) # https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python\n\n    # Instantiate new object and run methods\n    gl_use_case = GitLabUseCase(gl_verbose, gl_server, gl_token, gl_project_id)\n    gl_use_case.connect()\n    gl_use_case.print_project_attributes()\n```\n\n\nRunning the [script](https://gitlab.com/gitlab-de/use-cases/gitlab-api/gitlab-api-python/-/blob/main/python_gitlab_oop_helpers.py) with the `GL_PROJECT_ID` environment variable pretty-prints the project attributes as JSON on the terminal.\n\n\n![Example script that pretty-prints the project object attributes as JSON](/images/blogimages/efficient-devsecops-workflows-python-gitlab-handson/python_gitlab_oop_example_terminal_output_project_attributes.png){: .shadow}\n\n\n## More use cases\n\n\nBetter performance with API requests can be achieved by looking into parallelization and threading in Python. Users have been testing the storage analyzer script, and provided feedback to optimize the performance for the single-threaded script by using tasks and [Python threading](https://realpython.com/intro-to-python-threading/), similar to [this community project](https://gitlab.com/thelabnyc/gitlab-storage-cleanup). 
I might follow up on this topic in a future blog post, there are many more great use cases to cover using python-gitlab.\n\n\nThere is so much more to learn, here are a few examples from the GitLab community forum that could not make it into this blog post:\n\n\n* [Fetch review app environment URL from Merge Request](https://forum.gitlab.com/t/fetch-review-app-environment-url-from-merge-request/71335/2)\n\n* [Project visibility, project features, permissions](https://forum.gitlab.com/t/project-visibility-project-features-permissions-settings-api/32242)\n\n* [Download GitLab CI/CD job artifacts using Python](https://forum.gitlab.com/t/download-gitlab-ci-jobs-artifacts-using-python/25436/$)\n\n\n## Conclusion\n\n\nThe python-gitlab library helps to abstract raw REST API calls, and to keep access to attributes, functions and objects short and relatively easy. There are many use cases that can be solved efficiently. Alternative programming language libraries for the GitLab REST API are available [in the API clients section here](/partners/technology-partners/#api-clients).\n\n\nThe [GitLab Community Forum](https://forum.gitlab.com/) is a great place to collaborate on use cases and questions about possible solutions or code snippets. We'd love to hear from you about your use cases and challenges using the python-gitlab library.\n\n\nShoutout to the python-gitlab maintainers and contributors, developing this fantastic API library for many years now! If this blog post and the python-gitlab library helped you get more efficient, please consider [contributing to python-gitlab](https://python-gitlab.readthedocs.io/en/stable/#contributing). When there is a GitLab API feature missing, look into [contributing to GitLab](https://about.gitlab.com/community/contribute/), too. 
Thank you!\n\n\n\nCover image by [David Clode](https://unsplash.com/@davidclode) on [Unsplash](https://unsplash.com/photos/cxMJYcuCLEA)\n\n{: .note}\n",[233,719,951,9],{"slug":1522,"featured":6,"template":698},"efficient-devsecops-workflows-hands-on-python-gitlab-api-automation","content:en-us:blog:efficient-devsecops-workflows-hands-on-python-gitlab-api-automation.yml","Efficient Devsecops Workflows Hands On Python Gitlab Api Automation","en-us/blog/efficient-devsecops-workflows-hands-on-python-gitlab-api-automation.yml","en-us/blog/efficient-devsecops-workflows-hands-on-python-gitlab-api-automation",{"_path":1528,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1529,"content":1535,"config":1544,"_id":1546,"_type":14,"title":1547,"_source":16,"_file":1548,"_stem":1549,"_extension":19},"/en-us/blog/efficient-devsecops-workflows-with-rules-for-conditional-pipelines",{"title":1530,"description":1531,"ogTitle":1530,"ogDescription":1531,"noIndex":6,"ogImage":1532,"ogUrl":1533,"ogSiteName":685,"ogType":686,"canonicalUrls":1533,"schema":1534},"DevSecOps workflows with conditional CI/CD pipeline rules","CI/CD pipelines can be simple or complex, what makes them efficient are CI rules that define when and how they run.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749669673/Blog/Hero%20Images/engineering.png","https://about.gitlab.com/blog/efficient-devsecops-workflows-with-rules-for-conditional-pipelines","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"How to create efficient DevSecOps workflows with rules for conditional CI/CD pipelines\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Abubakar Siddiq Ango\"}],\n        \"datePublished\": \"2023-06-27\",\n      }",{"title":1536,"description":1531,"authors":1537,"heroImage":1532,"date":1539,"body":1540,"category":928,"tags":1541},"How to create efficient DevSecOps workflows with rules for conditional CI/CD 
pipelines",[1538],"Abubakar Siddiq Ango","2023-06-27","CI/CD pipelines can be simple or complex – what makes them efficient are\nrules that define when and how they run. By using rules, you create smarter\nCI/CD pipelines, which increase teams' productivity and allow organizations\nto iterate faster. In this tutorial, you will learn about the different\ntypes of CI/CD pipelines and rules and their use cases.\n\n\n## What is a pipeline?\n\nA pipeline is a top-level component of [continuous\nintegration](https://docs.gitlab.com/ee/ci/introduction/index.html#continuous-integration)\nand [continuous\ndelivery](https://docs.gitlab.com/ee/ci/introduction/index.html#continuous-delivery)/[continuous\ndeployment](https://docs.gitlab.com/ee/ci/introduction/index.html#continuous-deployment),\nand it comprises [jobs](https://docs.gitlab.com/ee/ci/jobs/index.html),\nwhich are lists of tasks to be executed. Jobs are organized in\n[stages](https://docs.gitlab.com/ee/ci/yaml/index.html#stages), which define\nwhen the jobs run.\n\n\nA pipeline can be a [basic\none](https://docs.gitlab.com/ee/ci/pipelines/pipeline_architectures.html#basic-pipelines)\nin which jobs run concurrently in each stage. 
Pipelines can also be complex,\nlike [parent-child\npipelines](https://docs.gitlab.com/ee/ci/pipelines/downstream_pipelines.html#parent-child-pipelines),\n[merge trains](https://docs.gitlab.com/ee/ci/pipelines/merge_trains.html),\n[multi-project\npipelines](https://docs.gitlab.com/ee/ci/pipelines/downstream_pipelines.html#multi-project-pipelines),\nor the more advanced [Directed Acyclic Graph\npipelines](https://docs.gitlab.com/ee/ci/directed_acyclic_graph/index.html)\n(DAG).\n\n\n![Complex pipeline showing\ndependencies](https://about.gitlab.com/images/blogimages/2023-06-15-efficient-devsecops-workflows-with-rules-for-conditional-pipelines/complex-pipelines.png)\n\n\nA [gitlab-runner\npipeline](https://gitlab.com/gitlab-org/gitlab-runner/-/pipelines/798871212/)\nshowing job dependencies.\n\n{: .note.text-center}\n\n\n![Directed Acyclic\nGraph](https://about.gitlab.com/images/blogimages/2023-06-15-efficient-devsecops-workflows-with-rules-for-conditional-pipelines/dag-pipelines.png)\n\n\nDirected Acyclic Graph pipeline\n\n{: .note.text-center}\n\n\nUse cases determine how complicated a pipeline can get. A use case might\nrequire testing an application and packaging it into a container; the\npipeline can even further deploy the container to an orchestrator like\nKubernetes or a container registry. Another use case might involve building\napplications that target different platforms with varying dependencies,\nwhich is where DAG pipelines shine.\n\n\n## What are CI/CD rules?\n\nCI/CD rules are the key to managing the flow of jobs in a pipeline. One of\nthe powerful features of GitLab CI/CD is the ability to control when a CI/CD\njob runs, which can depend on context, changes made,\n[workflow](https://docs.gitlab.com/ee/ci/yaml/workflow.html) rules, values\nof CI/CD variables, or custom conditions. 
Aside from using `rules`, you can\nalso control the flow of CI/CD pipelines using:\n\n\n* [`needs`](https://docs.gitlab.com/ee/ci/yaml/index.html#needs):\nestablishes relationships between jobs and used in DAG pipelines\n\n* [`only`](https://docs.gitlab.com/ee/ci/yaml/index.html#only--except):\ndefines when a job should run\n\n* [`except`](https://docs.gitlab.com/ee/ci/yaml/index.html#only--except):\ndefines when a job should not run\n\n* [`workflow`](https://docs.gitlab.com/ee/ci/yaml/workflow.html): controls\nwhen pipelines are created\n\n\n`only` and `except` should not be used with `rules` as this can lead to\nunexpected behavior. It is recommended to use `rules`, learn more in the\nfollowing sections.\n\n\n## What is the `rules` feature?\n\n`rules` determine when and if a job runs in a pipeline. If you have multiple\nrules defined, they are all evaluated in order until a matching rule is\nfound and the job is executed according to the specified configuration.\n\n\n[Rules](https://docs.gitlab.com/ee/ci/yaml/#rules) can be defined using the\nkeywords: `if`, `changes`, `exists`, `allow_failure`, `variables`, `when`\nand `needs`.\n\n\n### `rules:if`\n\nThe `if` keyword evaluates if a job should be added to a pipeline. The\nevaluation is done based on the values of [CI/CD\nvariables](https://docs.gitlab.com/ee/ci/variables/index.html) defined in\nthe scope of the job or pipeline and [predefined CI/CD\nvariables](https://docs.gitlab.com/ee/ci/variables/predefined_variables.html).\n\n\n```yaml\n\njob:\n  script:\n    - echo $(date)\n  rules:\n    - if: $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME == $CI_DEFAULT_BRANCH\n```\n\n\nIn the CI/CD script above, the job prints the current date and time with the\n`echo` command. 
The job is only executed if the source branch of a merge\nrequest (`CI_MERGE_REQUEST_SOURCE_BRANCH_NAME`) is the same as the project's\ndefault branch (`CI_DEFAULT_BRANCH`) in a [merge request\npipeline](https://docs.gitlab.com/ee/ci/pipelines/merge_request_pipelines.html).\nYou can use the `==` and `!=` operators for comparison, while `=~` and `!~`\nallow you to compare a variable to a regular expression. You can combine\nmultiple expressions using the `&&` (AND), `||` (OR) operators, and\nparentheses for grouping expressions.\n\n\n### `rules:changes`\n\nWith the `changes` keyword, you can watch for changes to certain files or\nfolders for a job to execute. GitLab uses the output of [Git\ndiffstat](https://git-scm.com/docs/git-diff#Documentation/git-diff.txt\n\n\n```yaml\n\njob:\n  script:\n    - terraform plan\n  rules:\n    - if: $CI_PIPELINE_SOURCE == \"merge_request_event\"\n      changes:\n        - terraform/**/*.tf\n```\n\n\nIn this example, the `terraform plan` is only executed when files with the\n`.tf` extension are changed in the `terraform` folder and its\nsubdirectories. An additional rule ensures the job is executed for [merge\nrequest\npipelines](https://docs.gitlab.com/ee/ci/pipelines/merge_request_pipelines.html).\n\n\nThe `changes` rule can look for changes in specific files with `paths`:\n\n\n```yaml\n\njob:\n  script:\n    - terraform plan\n  rules:\n    - if: $CI_PIPELINE_SOURCE == \"merge_request_event\"\n      changes:\n        paths:\n          - terraform/main.tf\n```\n\n\nChanges to files in a source reference (branch, tag, commit) can also be\ncompared against other references in the Git repository. The CI/CD job will\nonly execute when the source reference differs from the [specified reference\nvalue defined in\n`rules:changes:compare_to`](https://docs.gitlab.com/ee/ci/yaml/#ruleschangescompare_to).\nThis value can be a Git commit SHA, tag, or branch name. 
The following\nexample compares the source reference to the current `production` branch\n(`refs/head/production`).\n\n\n```yaml\n\njob:\n  script:\n    - terraform plan\n  rules:\n    - if: $CI_PIPELINE_SOURCE == \"merge_request_event\"\n      changes:\n        paths:\n          - terraform/main.tf\n        compare_to: 'refs/head/production'\n```\n\n\n### `rules:exists`\n\nLike `changes`, you can execute CI/CD jobs only when specific files exist\n[using `rules:exists`\nrules](https://docs.gitlab.com/ee/ci/yaml/#rulesexists). For example, you\ncan run a job that checks whether a `Gemfile.lock` file exists. The\nfollowing example audits a Ruby project for vulnerable versions of gems or\ninsecure gem sources using the [bundler-audit\nproject](https://github.com/rubysec/bundler-audit).\n\n\n```yaml\n\njob:\n  script:\n    - bundle-audit check --format json --output bundle-audit.json\n  rules:\n    - if: $CI_PIPELINE_SOURCE == \"merge_request_event\"\n      changes:\n        exits:\n          - Gemfile.lock\n```\n\n\n### `rules:allow_failure`\n\nThere are scenarios where the failure of a job should not affect the\nfollowing jobs and stages of the pipeline. This can be useful in use cases\nwhere non-blocking tasks are required as part of a project but don't impact\nthe project in any way. The [`rules:allow_failure`\nrule](https://docs.gitlab.com/ee/ci/yaml/#rulesallow_failure) can be set to\n`true` or `false`. 
It defaults to `false` implicitly when the rule is not\nspecified.\n\n\n```yaml\n\njob:\n  script:\n    - bundle-audit check --format json --output bundle-audit.json\n  rules:\n    - if: $CI_PIPELINE_SOURCE == \"merge_request_event\" && $CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED == \"false\"\n      changes:\n        exits:\n          - Gemfile.lock\n      allow_failure: true\n```\n\n\nIn this example, the job can fail only if a merge request event triggers the\npipeline and the target branch is not protected.\n\n\n### `rules:needs`\n\nDisabled by fault,\n[`rules:needs`](https://docs.gitlab.com/ee/ci/yaml/#rulesneeds) was\nintroduced in [GitLab\n16](https://about.gitlab.com/releases/2023/05/22/gitlab-16-0-released/) and\ncan be enabled with the `introduce_rules_with_needs` [feature\nflag](https://docs.gitlab.com/ee/user/feature_flags.html).\n[`needs`](https://docs.gitlab.com/ee/ci/yaml/index.html#needs) is used to\nexecute jobs out of order without waiting for other jobs in a stage to\ncomplete. 
When used with `rules`, it replaces the job's `needs`\nspecification when the set conditions are met.\n\n\n```yaml\n\nstages:\n  - build\n  - qa\n  - deploy\n\nbuild-dev:\n  stage: build\n  rules:\n    - if: $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH\n  script: echo \"Building dev version...\"\n\nbuild-prod:\n  stage: build\n  rules:\n    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH\n  script: echo \"Building production version...\"\n\nqa-checks:\n  stage: qa\n  script:\n    - echo \"Running QA checks before publishing to Production....\"\n\ndeploy:\n  stage: deploy\n  needs: ['build-dev']\n  rules:\n    - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH\n      needs: ['build-prod', 'qa-checks']\n    - when: on_success # Run the job in other cases\n  script: echo \"Deploying application.\"\n\n```\n\n\nIn the example above, the deploy job has the `build-dev` job as a dependency\nbefore it runs; however, when the commit branch is the project's default\nbranch, its dependency changes to `build-prod` and `qa-checks`. This can\nallow for extra checks to be implemented based on context.\n\n\n### `rules:variables`\n\nIn some situations, you only need certain variables in specific conditions,\nor their values change based on content; you can use the\n[`rules:variables`](https://docs.gitlab.com/ee/ci/yaml/#rulesvariables) rule\nto define variables when specific conditions are met. This also allows to\ncreate more dynamic CI/CD execution workflows.\n\n\n```\n\njob:\n  variables:\n    DEPLOY_VERSION: \"dev\"\n  rules:\n    - if: $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH\n      variables:\n        DEPLOY_VERSION: \"stable\"\n  script:\n    - echo \"Deploying $DEPLOY_VERSION version\"\n```\n\n\n### `workflow:rules`\n\nSo far, we have looked at controlling when jobs run in a pipeline using the\n`rules` keyword. Sometimes, you want to control how the entire pipeline\nbehaves: That's where [`workflow:rules` provide a powerful\noption](https://docs.gitlab.com/ee/ci/yaml/#workflowrules). 
`workflow:rules`\nare evaluated before jobs and take precedence over the job rules. For\nexample, if a job has rules that allow it to run against a specific branch,\nbut the workflow rules set jobs running against the branch to `when: never`,\nthe jobs will not run.\n\n\nAll the features of `rules` mentioned in the previous sections work for\n`workflow:rules`.\n\n\n```yaml\n\nworkflow:\n  rules:\n    - if: $CI_PIPELINE_SOURCE == \"schedule\"\n      when: never\n    - if: $CI_PIPELINE_SOURCE == \"push\"\n      when: never\n    - when: always\n```\n\n\nIn the example above, the CI/CD pipeline runs except when a schedule or push\nevent is triggered.\n\n\n## Use cases for CI/CD rules\n\nIn the previous section, we looked at different ways of using the `rules`\nfeature of GitLab CI/CD. In this section, we will explore practical use\ncases.\n\n\n### Developer experience\n\nOne of the benefits of a DevSecOps platform is to allow developers to focus\non what they do best: writing their code and doing as little operations as\npossible. A company's DevOps or Platform team can create CI/CD templates for\ndifferent stages of their development lifecycle and use rules to add CI/CD\njobs to handle specific tasks based on their technology stack. A developer\nonly needs to include a default CI/CD script and pipelines are automatically\ncreated based on files detected, refs used, or defined variables, leading to\nincreased productivity.\n\n\n### Security and quality assurance\n\nA major function of CI/CD pipelines is to catch bugs or vulnerabilities\nbefore they are deployed into production infrastructure. Using CI/CD rules,\nsecurity and quality assurance teams can dynamically run extra checks on\nchanges introduced when certain factors are introduced. For example, malware\nscans can be added when new file extensions not in an approved list are\ndetected, or more advanced performance tests are automatically added when a\ncertain level of change has been introduced to the codebase. 
With GitLab's\nbuilt-in security, including security in your pipelines can be done with\njust a few lines of code.\n\n\n```yaml\n\ninclude:\n  # Static\n  - template: Jobs/Container-Scanning.gitlab-ci.yml\n  - template: Jobs/Dependency-Scanning.gitlab-ci.yml\n  - template: Jobs/SAST.gitlab-ci.yml\n  - template: Jobs/Secret-Detection.gitlab-ci.yml\n  - template: Jobs/SAST-IaC.gitlab-ci.yml\n  - template: Jobs/Code-Quality.gitlab-ci.yml\n  - template: Security/Coverage-Fuzzing.gitlab-ci.yml\n  # Dynamic\n  - template: Security/DAST.latest.gitlab-ci.yml\n  - template: Security/BAS.latest.gitlab-ci.yml\n  - template: Security/DAST-API.latest.gitlab-ci.yml\n  - template: API-Fuzzing.latest.gitlab-ci.yml\n```\n\n\n### Automation\n\nThe power of CI/CD rules shines through in the (nearly) limitless\npossibilities of automating your CI/CD pipelines. GitLab\n[AutoDevOps](https://docs.gitlab.com/ee/topics/autodevops/) is an example.\nIt uses an opinionated best-practice collection of [GitLab CI/CD\ntemplates](https://gitlab.com/gitlab-org/gitlab/-/tree/master/lib/gitlab/ci/templates)\nand rules to detect the technology stack used. AutoDevOps creates relevant\njobs that take your application all the way to production from a push. You\ncan review the [AutoDevOps\ntemplate](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml)\nto learn how it leverages CI/CD rules for greater efficiency.\n\n\n### Using CI/CD components\n\nGrowth comes with several iterations of work and creating best practices.\nWhile building CI/CD pipelines, your DevOps team would have made several\nCI/CD scripts that they repurpose across pipelines using the\n[`include`](https://docs.gitlab.com/ee/ci/yaml/#include) keyword. 
In [GitLab\n16](https://about.gitlab.com/releases/2023/05/22/gitlab-16-0-released/),\nGitLab [introduced CI/CD\nComponents](https://about.gitlab.com/releases/2023/05/22/gitlab-16-0-released/#cicd-components),\nan experimental feature that allows your team to create reusable CI/CD\ncomponents and publish them as a catalog that can be used to build smarter\nCI/CD pipelines rapidly. You can learn more [about using CI/CD\ncomponents](https://docs.gitlab.com/ee/ci/components/) and the [component\ncatalog\ndirection](https://about.gitlab.com/direction/verify/component_catalog/).\n\n\nGitLab CI/CD enables you to run smarter pipelines, and it does so together\nwith [GitLab Duo, AI-powered workflows](/gitlab-duo/) to help you build more\nsecure software, faster.\n",[719,1542,1543,951,9],"CI","CD",{"slug":1545,"featured":6,"template":698},"efficient-devsecops-workflows-with-rules-for-conditional-pipelines","content:en-us:blog:efficient-devsecops-workflows-with-rules-for-conditional-pipelines.yml","Efficient Devsecops Workflows With Rules For Conditional Pipelines","en-us/blog/efficient-devsecops-workflows-with-rules-for-conditional-pipelines.yml","en-us/blog/efficient-devsecops-workflows-with-rules-for-conditional-pipelines",{"_path":1551,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1552,"content":1557,"config":1564,"_id":1566,"_type":14,"title":1567,"_source":16,"_file":1568,"_stem":1569,"_extension":19},"/en-us/blog/embedded-views-the-future-of-work-tracking-in-gitlab",{"config":1553,"ogImage":1554,"title":1555,"description":1556},{"noIndex":6},"https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099072/Blog/Hero%20Images/Blog/Hero%20Images/agile_agile.png_1750099072322.png","Embedded views: The future of work tracking in GitLab","Learn how embedded views, powered by GitLab Query Language, help GitLab teams work more efficiently, make data-driven decisions, and maintain visibility across complex 
workflows.",{"title":1555,"description":1556,"authors":1558,"date":1561,"body":1562,"category":1053,"tags":1563,"heroImage":1554},[1559,1560,1092],"Matthew Macfarlane","Himanshu Kapoor","2025-08-21","Ever find yourself switching between tabs in GitLab just to keep track of\nwhat’s happening in your project? Maybe you’re checking on an issue, then\njumping to a merge request, then over to an epic to see how everything\nconnects. Before you know it, you’ve got a browser full of tabs and you’ve\nlost your train of thought.\n\nIf that sounds familiar, you’re definitely not alone. So many teams waste time and energy flipping through various items in their project management software, just trying to get a handle on their work.\n\nThat's why we created [embedded views](https://docs.gitlab.com/user/glql/#embedded-views), powered by [GitLab Query Language (GLQL)](https://docs.gitlab.com/user/glql/). With embedded views, [available in 18.3](https://about.gitlab.com/releases/2025/08/21/gitlab-18-3-released/), you get live, relevant information right where you’re already working in GitLab. No more endless context switching. No more outdated reports. Just the info you need, right when you need it.\n\n## Why embedded views matter\n\nEmbedded views are more than just a new feature, they're a fundamental shift in how teams understand and track their work within GitLab. With embedded views, teams can maintain context while accessing real-time information, creating shared understanding, and improving collaboration without ever leaving their current workflow. It’s about making work tracking feel natural and effortless, so you can focus on what matters.\n\n## How it works: Real-time data right where you need it the most\n\nEmbedded views let you insert live GLQL queries in Markdown code blocks throughout wiki pages, epics, issues, and merge requests. 
Here's what makes them so useful:\n\n### Always up to date\n\nGLQL queries are dynamic, pulling fresh data each time the page loads, so your embedded views always reflect the current state of your work, not the state when you embedded the view. When changes happen to issues, merge requests, or milestones, a page refresh will show those updates in your embedded view.\n\n### Contextual awareness\n\nUse functions like `currentUser()` and `today()` to make queries context-specific. Your embedded views automatically adapt to show relevant information for whoever is viewing them, creating personalized experiences without manual configuration.\n\n### Powerful filtering\n\nFilter by fields like assignee, author, label, milestone, health status, creation date, and more. Use logical expressions to get exactly the data you want. We support more than 30 fields as of 18.3.\n\n### Customizable display\n\nYou can display your data as a table, a list, or a numbered list. Choose which fields to show, set a limit on the number of items, and specify the sort order to keep your view focused and actionable.\n\n### Availability\n\nYou can use embedded views in group and project wikis, epic and issue descriptions, merge requests, and comments. GLQL is available across all GitLab tiers: Free, Premium, and Ultimate, on GitLab.com, GitLab Self-Managed, and GitLab Dedicated. Certain functionality, such as displaying epics, status, custom fields, iterations, and weights, is available in the Premium and Ultimate tiers. 
Displaying health status is available only in Ultimate.\n\n## See embedded views in action\n\nThe syntax of an embedded view's source is a superset of YAML that consists of:\n\n* The `query` parameter: Expressions joined together with a logical operator, such as `and`.\n* Parameters related to the presentation layer, like `display`, `limit`, or `fields`, `title`, and `description`\n  represented as YAML.\n\nA view is defined in Markdown as a code block, similar to other code blocks like Mermaid.\n\nFor example:\n\n> Display a table of first 5 open issues assigned to the authenticated user in `gitlab-org/gitlab`.\n> Display columns `title`, `state`, `health`, `description`, `epic`, `milestone`, `weight`, and `updated`.\n\n````yaml\n```glql\ndisplay: table\ntitle: GLQL table 🎉\ndescription: This view lists my open issues\nfields: title, state, health, epic, milestone, weight, updated\nlimit: 5\nquery: project = \"gitlab-org/gitlab\" AND assignee = currentUser() AND state = opened\n```\n````\n\nThis source should render a table like the one below:\n\n![](https://res.cloudinary.com/about-gitlab-com/image/upload/v1755193172/ibzfopvpztpglnccwrjj.png)\n\nAn easy way to create your first embedded view is to navigate to the **More options** dropdown in the rich text editor toolbar. Once in this toolbar, select **Embedded view**, which populates the following query in a Markdown code block:\n\n````yaml\n```glql\nquery: assignee = currentUser()\nfields: title, createdAt, milestone, assignee\ntitle: Issues assigned to current user\n```\n````\n\nSave your changes to the comment or description where the code block appears, and you're done! You've successfully created your first embedded view!\n\n## How GitLab uses embedded views\n\nWhether tracking merge requests targeting security releases, triaging bugs to improve backlog hygiene, or managing team onboarding and milestone planning, we rely on embedded views for mission-critical processes every day. 
This isn't just a feature we built, it's a tool we depend on to run our business effectively. When you adopt embedded views, you're getting a tested solution that's already helping GitLab teams work more efficiently, make data-driven decisions, and maintain visibility across complex workflows. Simply stated, embedded views can transform how your team accesses and analyzes the work that matters most to your success.\n\nTo learn and see more about how GitLab is using embedded views internally, check out [How GitLab measures Red Team impact: The adoption rate metric](https://about.gitlab.com/blog/how-gitlab-measures-red-team-impact-the-adoption-rate-metric/), and Global Search Release Planning issues for the [18.1](https://gitlab.com/gitlab-org/search-team/team-tasks/-/issues/239), [18.2](https://gitlab.com/gitlab-org/search-team/team-tasks/-/issues/241), and [18.3](https://gitlab.com/gitlab-org/search-team/team-tasks/-/issues/245) milestones.\n\n## What's next\n\nEmbedded views are just the start of [Knowledge Group's](https://about.gitlab.com/direction/plan/knowledge/) vision for work tracking. Learn more about what we're focusing on next in the [embedded views post-GA epic](https://gitlab.com/groups/gitlab-org/-/epics/15249). As embedded views evolve we're committed to making them even more powerful and [accessible](https://gitlab.com/gitlab-org/gitlab/-/issues/548722).\n\n## Share your experience\n\nShare your feedback in the [embedded views GA feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/509792) or via the [embedded views GA survey](https://gitlab.fra1.qualtrics.com/jfe/form/SV_6PFhgZMBA06kr7E). 
Whether you've discovered innovative use cases, encountered challenges, or have ideas for improvements, we want to hear from you.\n",[908,9,888],{"featured":6,"template":698,"slug":1565},"embedded-views-the-future-of-work-tracking-in-gitlab","content:en-us:blog:embedded-views-the-future-of-work-tracking-in-gitlab.yml","Embedded Views The Future Of Work Tracking In Gitlab","en-us/blog/embedded-views-the-future-of-work-tracking-in-gitlab.yml","en-us/blog/embedded-views-the-future-of-work-tracking-in-gitlab",{"_path":1571,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1572,"content":1578,"config":1583,"_id":1585,"_type":14,"title":1586,"_source":16,"_file":1587,"_stem":1588,"_extension":19},"/en-us/blog/enable-secure-sudo-access-for-gitlab-remote-development-workspaces",{"title":1573,"description":1574,"ogTitle":1573,"ogDescription":1574,"noIndex":6,"ogImage":1575,"ogUrl":1576,"ogSiteName":685,"ogType":686,"canonicalUrls":1576,"schema":1577},"Enable secure sudo access for GitLab Remote Development workspaces","Learn how to allow support for sudo commands using Sysbox, Kata Containers, and user namespaces in this easy-to-follow tutorial.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675033/Blog/Hero%20Images/blog-image-template-1800x945.png","https://about.gitlab.com/blog/enable-secure-sudo-access-for-gitlab-remote-development-workspaces","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Enable secure sudo access for GitLab Remote Development workspaces\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Vishal Tak\"}],\n        \"datePublished\": \"2024-11-20\",\n      }",{"title":1573,"description":1574,"authors":1579,"heroImage":1575,"date":1337,"body":1580,"category":693,"tags":1581},[1112],"A development environment often requires sudo permissions to install, configure, and use dependencies during runtime. 
GitLab now allows secure sudo access for [GitLab Remote Development workspaces](https://about.gitlab.com/blog/quick-start-guide-for-gitlab-workspaces/). This tutorial shows you how to enable GitLab workspace users to securely use sudo commands to perform common tasks.\n\n## The challenge\n\nFor the sake of this article, say your project is as simple as the below code.\n\n```\npackage main\n\nimport (\n\t\"encoding/json\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"os\"\n)\n\nfunc main() {\n\t// Set up JSON logger\n\tlogFile, err := os.OpenFile(\"server.log\", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tdefer logFile.Close()\n\n\tjsonHandler := slog.NewJSONHandler(logFile, nil)\n\tlogger := slog.New(jsonHandler)\n\tslog.SetDefault(logger)\n\n\t// Define handlers\n\thttp.HandleFunc(\"/path1\", handleRequest)\n\thttp.HandleFunc(\"/path2\", handleRequest)\n\n\t// Start server\n\tslog.Info(\"Starting server on :3000\")\n\terr = http.ListenAndServe(\":3000\", nil)\n\tif err != nil {\n\t\tslog.Error(\"Server failed to start\", \"error\", err)\n\t}\n}\n\nfunc handleRequest(w http.ResponseWriter, r *http.Request) {\n\tdata := make(map[string]interface{})\n\tfor k, v := range r.Header {\n\t\tdata[k] = v\n\t}\n\n\tdata[\"method\"] = r.Method\n\tdata[\"url\"] = r.URL.String()\n\tdata[\"remote_addr\"] = r.RemoteAddr\n\n\tresponse, err := json.MarshalIndent(data, \"\", \"  \")\n\tif err != nil {\n\t\tslog.Error(\"Failed to marshal metadata\", \"error\", err)\n\t\thttp.Error(w, \"Internal Server Error\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Log the metadata\n\tslog.Info(\"Request received\",\n\t\t\"path\", r.URL.Path,\n\t\t\"response\", string(response),\n\t)\n\n\t// Write response\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.Write(response)\n}\n```\n\nThis code starts an HTTP server on port 3000, exposes two paths: `path1` and `path2`. 
Each HTTP request received is logged to a file `server.log`.\n\nLet's run this code with `go run main.go` and generate some requests.\n\n```\ni=1\nwhile [ \"$i\" -le 100 ]; do\n  echo \"Iteration $i\"\n\n  random_number=$RANDOM\n  if [ $((random_number % 2)) -eq 0 ]; then\n    curl \"localhost:3000/path1\"\n  else\n    curl \"localhost:3000/path2\"\n  fi\n\n  i=$((i + 1))\ndone\n```\n\nAs you work on this application, you realize the need to analyze the logs to debug an issue. You look at the log file and it is too long to parse with a simple glance. You remember there is a handy tool, [jq](https://jqlang.github.io/jq/), which parses JSON data. But your workspace does not have it installed.\n\nYou want to install `jq` through the package manager for this workspace only.\n\n```\nsudo apt update\nsudo apt install jq\n```\n\nThe output is:\n\n```\nsudo: The \"no new privileges\" flag is set, which prevents sudo from running as root.\nsudo: If sudo is running in a container, you may need to adjust the container configuration to disable the flag.\n```\n\nThis happens because GitLab workspaces explicitly disallows `sudo` access to prevent privilege escalation on the Kubernetes host.\n\nNow, there is a more secure way to run `sudo` commands in a workspace.\n\n## How sudo access works\n\nThat is exactly what we have [unlocked](https://docs.gitlab.com/ee/user/workspace/configuration.html#configure-sudo-access-for-a-workspace) in the 17.4 release of GitLab.\n\nYou can configure secure sudo access for workspaces using any of the following options:\n\n- Sysbox  \n- Kata Containers  \n- User namespaces\n\nWe will set up three GitLab agents for workspaces to demonstrate each option.\n\n### Sysbox\n\n[Sysbox](https://github.com/nestybox/sysbox) is a container runtime that improves container isolation and enables containers to run the same workloads as virtual machines.\n\nTo configure sudo access for a workspace with Sysbox:\n\n1. 
In the Kubernetes cluster, [install Sysbox](https://github.com/nestybox/sysbox#installation).\n2. In the GitLab agent for workspaces, set the following config:\n\n```\nremote_development:\n  enabled: true\n  dns_zone: \"sysbox-update.me.com\"\n  default_runtime_class: \"sysbox-runc\"\n  allow_privilege_escalation: true\n  annotations:\n    \"io.kubernetes.cri-o.userns-mode\": \"auto:size=65536\"\n```\n\n3. Add other settings in the agent config as per your requirements. [GitLab agent for workspaces settings](https://docs.gitlab.com/ee/user/workspace/gitlab_agent_configuration.html#workspace-settings) for more information about individual settings.  \n4. Allow the agent to be used for workspaces in a group. See the [documentation](https://docs.gitlab.com/ee/user/workspace/gitlab_agent_configuration.html#allow-a-cluster-agent-for-workspaces-in-a-group) for more information.  \n5. Update GitLab Workspaces Proxy to serve traffic for the domain used in the above agent configuration. See [Tutorial: Set up the GitLab workspaces proxy](https://docs.gitlab.com/ee/user/workspace/set_up_workspaces_proxy.html) for more information.\n\n### Kata Containers\n\n[Kata Containers](https://github.com/kata-containers/kata-containers) is a standard implementation of lightweight virtual machines that perform like containers but provide the workload isolation and security of virtual machines.\n\nTo configure sudo access for a workspace with Kata Containers:\n\n1. In the Kubernetes cluster, [install Kata Containers](https://github.com/kata-containers/kata-containers/tree/main/docs/install).  \n2. In the GitLab agent for workspaces, set the following config:\n\n```\nremote_development:\n  enabled: true\n  dns_zone: \"kata-update.me.com\"\n  default_runtime_class: \"kata-qemu\"\n  allow_privilege_escalation: true\n```\n\n3. Add other settings in the agent config as per your requirements. 
[GitLab agent for workspaces settings](https://docs.gitlab.com/ee/user/workspace/gitlab_agent_configuration.html#workspace-settings) for more information about individual settings.  \n4. Allow the agent to be used for workspaces in a group. See the [documentation](https://docs.gitlab.com/ee/user/workspace/gitlab_agent_configuration.html#allow-a-cluster-agent-for-workspaces-in-a-group) for more information.  \n5. Update GitLab Workspaces Proxy to serve traffic for the domain used in the above agent configuration. See [Tutorial: Set up the GitLab workspaces proxy](https://docs.gitlab.com/ee/user/workspace/set_up_workspaces_proxy.html) for more information.\n\n### User namespaces\n\n[User namespaces](https://kubernetes.io/docs/concepts/workloads/pods/user-namespaces/) isolate the user running inside the container from the user on the host.\n\nTo configure sudo access for a workspace with user namespaces:\n\n1. In the Kubernetes cluster, [configure user namespaces](https://kubernetes.io/blog/userns-beta/).  \n2. In the GitLab agent for workspaces, set the following config:\n\n```\nremote_development:\n  enabled: true\n  dns_zone: \"userns-update.me.com\"\n  use_kubernetes_user_namespaces: true\n  allow_privilege_escalation: true\n```\n\n3. Add other settings in the agent config as per your requirements. [GitLab agent for workspaces settings](https://docs.gitlab.com/ee/user/workspace/gitlab_agent_configuration.html#workspace-settings) for more information about individual settings.  \n4. Allow the agent to be used for workspaces in a group. See the [documentation](https://docs.gitlab.com/ee/user/workspace/gitlab_agent_configuration.html#allow-a-cluster-agent-for-workspaces-in-a-group) for more information.  \n5. Update GitLab Workspaces Proxy to serve traffic for the domain used in the above agent configuration. 
See [Tutorial: Set up the GitLab workspaces proxy](https://docs.gitlab.com/ee/user/workspace/set_up_workspaces_proxy.html) for more information.\n\nSetting up a Kubernetes cluster with user namespaces configured is challenging since it is behind a beta feature gate in Kubernetes Version 1.31.0. This means it is not yet possible to configure such a cluster on the major cloud providers because they don't provide a mechanism to enable feature gates in their managed Kubernetes offering. Here is an example of [configuring a simple Kubernetes cluster using `kubeadm`](https://gitlab.com/gitlab-org/gitlab/-/issues/468290#note_1959300036).\n\n### Create a workspace\n\nIf you now create a workspace with these agents and try installing `jq` through a package manager, it should succeed!\n\nYou can analyze the logs using `jq`. Say you wanted to inspect the log entries where the path is `/path1`, you can run:\n\n```\njq 'select(.path == \"/path1\")' server.log\n```\n\nThe output is:\n\n```\n{\n  \"time\": \"2024-10-31T12:04:38.474806+05:30\",\n  \"level\": \"INFO\",\n  \"msg\": \"Request received\",\n  \"path\": \"/path1\",\n  \"response\": \"{\\n  \\\"Accept\\\": [\\n    \\\"*/*\\\"\\n  ],\\n  \\\"User-Agent\\\": [\\n    \\\"curl/8.7.1\\\"\\n  ],\\n  \\\"method\\\": \\\"GET\\\",\\n  \\\"remote_addr\\\": \\\"[::1]:61246\\\",\\n  \\\"url\\\": \\\"/path1\\\"\\n}\"\n}\n{\n  \"time\": \"2024-10-31T12:06:22.397453+05:30\",\n  \"level\": \"INFO\",\n  \"msg\": \"Request received\",\n  \"path\": \"/path1\",\n  \"response\": \"{\\n  \\\"Accept\\\": [\\n    \\\"*/*\\\"\\n  ],\\n  \\\"User-Agent\\\": [\\n    \\\"curl/8.7.1\\\"\\n  ],\\n  \\\"method\\\": \\\"GET\\\",\\n  \\\"remote_addr\\\": \\\"[::1]:61311\\\",\\n  \\\"url\\\": \\\"/path1\\\"\\n}\"\n}\n{\n  \"time\": \"2024-10-31T12:19:34.974354+05:30\",\n  \"level\": \"INFO\",\n  \"msg\": \"Request received\",\n  \"path\": \"/path1\",\n  \"response\": \"{\\n  \\\"Accept\\\": [\\n    
\\\"curl/8.7.1\\\"\\n  ],\\n  \\\"method\\\": \\\"GET\\\",\\n  \\\"remote_addr\\\": \\\"[::1]:61801\\\",\\n  \\\"url\\\": \\\"/path1\\\"\\n}\"\n}\n```\n\n## Get started today\n\nLearn even more with our [Configure sudo access for a workspace documentation](https://docs.gitlab.com/ee/user/workspace/configuration.html#configure-sudo-access-for-a-workspace). See [GitLab agent for workspaces settings](https://docs.gitlab.com/ee/user/workspace/gitlab_agent_configuration.html#workspace-settings) for details on individual settings.\n\n> New to GitLab Remote Development? Here is a [quickstart guide](https://about.gitlab.com/blog/quick-start-guide-for-gitlab-workspaces/) to get you up to speed.",[693,719,1582,9,1319],"remote work",{"slug":1584,"featured":91,"template":698},"enable-secure-sudo-access-for-gitlab-remote-development-workspaces","content:en-us:blog:enable-secure-sudo-access-for-gitlab-remote-development-workspaces.yml","Enable Secure Sudo Access For Gitlab Remote Development Workspaces","en-us/blog/enable-secure-sudo-access-for-gitlab-remote-development-workspaces.yml","en-us/blog/enable-secure-sudo-access-for-gitlab-remote-development-workspaces",{"_path":1590,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1591,"content":1597,"config":1604,"_id":1606,"_type":14,"title":1607,"_source":16,"_file":1608,"_stem":1609,"_extension":19},"/en-us/blog/enhance-application-security-with-gitlab-hackerone",{"title":1592,"description":1593,"ogTitle":1592,"ogDescription":1593,"noIndex":6,"ogImage":1594,"ogUrl":1595,"ogSiteName":685,"ogType":686,"canonicalUrls":1595,"schema":1596},"Enhance application security with GitLab + HackerOne","Learn about the GitLab + HackerOne partnership and how to easily implement an integration that improves your organization’s application security 
posture.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097503/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%2810%29_5ET24Q6i8ihqrAOkge7a1R_1750097503214.png","https://about.gitlab.com/blog/enhance-application-security-with-gitlab-hackerone","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Enhance application security with GitLab + HackerOne\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Fernando Diaz\"}],\n        \"datePublished\": \"2025-04-03\",\n      }",{"title":1592,"description":1593,"authors":1598,"heroImage":1594,"date":1600,"body":1601,"category":693,"tags":1602},[1599],"Fernando Diaz","2025-04-03","Security can no longer be an afterthought in the development process.\nOrganizations need robust solutions that integrate security throughout the\nentire software development lifecycle. This is where the partnership between\nHackerOne and GitLab creates a compelling combination for modern application\ndevelopment teams.\n\n\nGitLab, the comprehensive, AI-powered DevSecOps platform, and HackerOne, the\nleading crowd-sourced security platform, have established a partnership that\nbrings together the best of both worlds: GitLab's streamlined DevSecOps\nworkflow and HackerOne's powerful vulnerability management capabilities.\n\n\nIn this tutorial, you'll learn how to enhance developer productivity and\nyour security posture by implementing HackerOne's GitLab integration.\n\n\n## An integration that empowers developers\n\n\nHackerOne's GitLab integration is remarkably straightforward, yet powerful.\nWhen security researchers discover vulnerabilities through HackerOne's\nplatform, these findings are automatically converted into GitLab issues.\nThis creates a seamless workflow where:\n\n\n* Security researchers identify vulnerabilities via HackerOne's platform  \n\n* Validated vulnerabilities are automatically converted into GitLab 
issues  \n\n* Development teams can address these issues directly within their existing\nworkflow  \n\n* Resolution status is synchronized between both platforms\n\n\nYou can start leveraging the benefits of GitLab and HackerOne by using the\n[integration](https://docs.hackerone.com/en/articles/8571227-gitlab-integration)\nto track GitLab issues as references on HackerOne. This integration provides\nbi-directional and seamless data syncing between your HackerOne report and\nGitLab issues, improving alignment between development and security teams\nwhile streamlining security vulnerability processing.\n\n\nTo configure the GitLab integration to sync information between your\nHackerOne report and your Gitlab issue, follow the instructions provided in\n[HackerOne's GitLab integration\ndocumentation](https://docs.hackerone.com/en/articles/10394699-gitlab-setup),\nwhich includes:\n\n\n1. [Setting up an OAuth 2.0\napplication](https://docs.gitlab.com/ee/integration/oauth_provider.html) for\nyour GitLab instance with the provided HackerOne settings  \n\n2. Connecting HackerOne to the newly created OAuth 2.0 on GitLab  \n\n3. Authorizing HackerOne to access the GitLab API  \n\n4. Configuring which GitLab project you would like to escalate HackerOne\nreports to  \n\n5. Selecting the HackerOne fields to map to corresponding GitLab fields  \n\n6. GitLab-to-HackerOne and HackerOne-to-GitLab event configuration\n\n\nOnce the integration is in place, you’ll be able to seamlessly sync data\nbi-directionally between both GitLab and HackerOne. This helps simplify\ncontext-switching and allows vulnerabilities to be tracked with ease\nthroughout both systems. The integration allows for the following features:\n\n\n* **Creating a GitLab Issue from HackerOne:** You can create new GitLab\nissues for reports you receive on HackerOne.  
\n\n* **Linking HackerOne reports to existing GitLab tasks.**   \n\n* **Syncing updates from HackerOne to GitLab:** The following updates on a\nreport are synced as a comment to GitLab.  \n  * Report comments  \n  * State changes  \n  * Rewards  \n  * Assignee changes  \n  * Public disclosure  \n  * Close GitLab Issue  \n* **Syncing Updates from GitLab to HackerOne:** The following updates on\nGitLab will be reflected in HackerOne as an internal comment on the\nassociated report:  \n  * Comments  \n  * State changes  \n* **HackerOne severity to GitLab label mapping**: Allows you to set a custom\npriority when escalating a report to GitLab.  \n\n* **Due date mapping:** Allows you to automatically set a custom due date\nbased on the severity of a report.\n\n\n![GitLab + HackerOne adding comments or change the state of the report in\nGitLab](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097510/Blog/Content%20Images/Blog/Content%20Images/sync_aHR0cHM6_1750097509644.png)\n\n\nThese features improve alignment between development and security teams and\nstreamlining security vulnerability processing. To learn more on how the\nintegration works, see the [integration\ndocumentation](https://docs.hackerone.com/en/articles/8571227-gitlab-integration).\n\n\n## A look into HackerOne bug bounty programs\n\n\nHackerOne provides bug bounty programs or cybersecurity initiatives where\nrewards are offered for discovering and reporting vulnerabilities in\ncustomers’ software systems, websites, or applications. 
Bug bounty programs\nhelp enhance the security of an application by:\n\n\n* Identifying security flaws before malicious actors can exploit them  \n\n* Leveraging diverse expertise from a global community of security\nresearchers  \n\n* Providing a cost-effective way to improve cybersecurity  \n\n* Complementing internal security efforts and traditional penetration\ntesting\n\n\nGitLab utilizes HackerOne’s bug bounty program, allowing security\nresearchers to report vulnerabilities in GitLab applications or\ninfrastructure. This crowdsourced approach helps GitLab identify and address\npotential security issues more effectively.\n\n\n![HackerOne GitLab Bug Bounty\npage](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097510/Blog/Content%20Images/Blog/Content%20Images/hackerone_gitlab_bug_bounty_page_aHR0cHM6_1750097509645.png)\n\n\nBy leveraging HackerOne's platform and the global hacker community,\norganizations can significantly enhance their security posture, identify\nvulnerabilities faster, and stay ahead of potential threats.\n\n\n## Secure applications and improve efficiency with GitLab \n\n\nGitLab provides a complete DevSecOps platform, which enables functionality\nfor the complete software development lifecycle, including security and\ncompliance tools. GitLab supports the following security scanner types:\n\n- Static Application Security Testing (SAST)\n\n- Dynamic Application Security Testing (DAST)\n\n- Container Scanning\n\n- Dependency Scanning\n\n- Infrastructure as Code Scanning\n\n- Coverage-guided Fuzzing\n\n- Web API Fuzzing\n\n\nWith GitLab, you can add security scanning by simply applying a template to\nyour CI/CD pipeline definition file. 
For example, enabling SAST just takes a\nfew lines of code in the `.gitlab-ci.yml`:\n\n\n```yaml\n\nstages:\n  - test\n\ninclude:\n  - template: Jobs/SAST.gitlab-ci.yml\n```\n\n\nThis will run SAST on the test stage, and [auto-detect the languages\nused](https://docs.gitlab.com/ee/user/application_security/sast/#supported-languages-and-frameworks)\nin your application. Then, whenever you create a merge request, SAST will\ndetect the vulnerabilities in the diff between the feature branch and the\ntarget branch and provide relevant data on each vulnerability to assist with\nremediation.\n\n\n![NoSQL injection vulnerability seen in\nMR](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097510/Blog/Content%20Images/Blog/Content%20Images/no_sql_injection_vulnerability_mr_view_aHR0cHM6_1750097509647.png)\n\n\nThe results of the SAST scanner can block code from being merged if security\npolicies are applied. Native GitLab users can be set as approvers, allowing\nrequired reviews before merging insecure code. This assures that all\nvulnerabilities have oversight from the appropriate parties.\n\n\n![Merge request approval\npolicy](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097510/Blog/Content%20Images/Blog/Content%20Images/merge_request_approval_policy_aHR0cHM6_1750097509649.png)\n\n\nHackerOne has integrated GitLab into its operations and development\nprocesses in several significant ways, which have led to development process\nimprovements and enhanced scalability and collaboration. These improvements\ninclude faster deployments and cross-team planning.\n\n\n## Key benefits of HackerOne's GitLab integration\n\n\nThe key benefits of using HackerOne and GitLab together include:\n\n\n* **Enhanced security visibility:** Development teams gain immediate\nvisibility into security vulnerabilities without leaving their primary\nworkflow environment. This real-time awareness helps teams prioritize\nsecurity issues alongside feature development.  
\n\n* **Streamlined remediation process:** By converting HackerOne reports\ndirectly into GitLab issues, the remediation process becomes part of the\nstandard development cycle. This eliminates context switching between\nplatforms and ensures security fixes are tracked alongside other development\nwork.  \n\n* **Accelerated time to fix:** The integration significantly reduces the\ntime between vulnerability discovery and resolution. With HackerOne\nsubmissions immediately available in GitLab, development teams can begin\nworking on fixes without delay, improving overall security posture.  \n\n* **Improved collaboration:** Security researchers, security teams, and\ndevelopers can communicate more effectively through this integration.\nComments and updates flow between both platforms, creating a collaborative\nenvironment focused on improving security.  \n\n* **Real-world impact:** Organizations implementing the HackerOne and GitLab\nintegration have reported:  \n  * Up to 70% reduction in time from vulnerability discovery to fix  \n  * Improved developer satisfaction by keeping them in their preferred workflow  \n  * Enhanced security visibility across the organization  \n  * More effective allocation of security resources\n\n> To get started today, visit [the integration setup\npage](https://docs.hackerone.com/en/articles/10394699-gitlab-setup) today.\n\n\n## Learn more\n\n\nTo learn more about GitLab and HackerOne, and how we can help enhance your\nsecurity posture, check out the following resources:\n\n* [HackerOne's GitLab Integration\nUsage](https://docs.hackerone.com/en/articles/8571227-gitlab-integration)  \n\n* [HackerOne GitLab Bug Bounty\nProgram](https://hackerone.com/gitlab?type=team)\n\n* [GitLab Security and Compliance\nSolutions](https://about.gitlab.com/solutions/security-compliance/)  \n\n* [HackerOne achieves 5x faster deployments with GitLab’s integrated\nsecurity](https://about.gitlab.com/customers/hackerone/)  \n\n* [GitLab Application 
Security\nDocumentation](https://docs.gitlab.com/ee/user/application_security/)\n",[693,719,233,284,9,951,1603],"bug bounty",{"slug":1605,"featured":6,"template":698},"enhance-application-security-with-gitlab-hackerone","content:en-us:blog:enhance-application-security-with-gitlab-hackerone.yml","Enhance Application Security With Gitlab Hackerone","en-us/blog/enhance-application-security-with-gitlab-hackerone.yml","en-us/blog/enhance-application-security-with-gitlab-hackerone",{"_path":1611,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1612,"content":1618,"config":1623,"_id":1625,"_type":14,"title":1626,"_source":16,"_file":1627,"_stem":1628,"_extension":19},"/en-us/blog/enhance-data-security-with-custom-pii-detection-rulesets",{"title":1613,"description":1614,"ogTitle":1613,"ogDescription":1614,"noIndex":6,"ogImage":1615,"ogUrl":1616,"ogSiteName":685,"ogType":686,"canonicalUrls":1616,"schema":1617},"Strengthen data security with custom PII detection rulesets","This tutorial explains how GitLab's customizable Secret Detection rulesets enhance data security by identifying PII patterns in code repositories. Learn how AI can help.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097701/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%285%29_1iy516k40hwBDChKcUJ2zb_1750097700983.png","https://about.gitlab.com/blog/enhance-data-security-with-custom-pii-detection-rulesets","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Strengthen data security with custom PII detection rulesets\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Fernando Diaz\"}],\n        \"datePublished\": \"2025-04-01\",\n      }",{"title":1613,"description":1614,"authors":1619,"heroImage":1615,"date":1620,"body":1621,"category":693,"tags":1622},[1599],"2025-04-01","Protecting sensitive information is more critical than ever. 
GitLab's Secret\nDetection feature provides a powerful solution to identify and prevent the\nexposure of sensitive data. This tutorial explores how GitLab Secret\nDetection works, how to create custom rulesets for finding personally\nidentifiable information, and how GitLab Duo Chat can streamline the\ncreation of regex patterns for PII detection.\n\n\n## Understanding GitLab Secret Detection\n\n\n[GitLab Secret\nDetection](https://docs.gitlab.com/user/application_security/secret_detection/)\nis a security scanning feature integrated into the GitLab CI/CD pipeline. It\nautomatically scans your codebase to identify hardcoded secrets,\ncredentials, and other sensitive information that shouldn't be stored in\nyour repository.\n\n\n### Key benefits\n\n\n* **Data breach prevention** detects secrets before they're committed to\nyour repository.  \n\n* **Automated scanning** runs as part of your CI/CD pipeline without manual\nintervention.  \n\n* **Customizable rules** extend detection capabilities with custom\npatterns.  \n\n* **Compliance support** helps meet regulatory requirements like GDPR,\nHIPAA, and the California Privacy Protection Act.\n\n\n## Create custom rulesets for PII detection\n\n\nWhile GitLab's default secret detection covers common secrets like API keys\nand passwords, you may need custom rules to identify specific types of PII\nrelevant to your organization. \n\n\nTo get started, create a new GitLab project and follow the steps below. 
You\ncan follow along and see usage examples in our [PII Demo\nApplication](https://gitlab.com/gitlab-da/tutorials/security-and-governance/devsecops/secret-scanning/pii-data-ruleset).\n\n\n**Step 1: Set up Secret Detection**\n\n\nEnsure Secret Detection is enabled in your `.gitlab-ci.yml` file:\n\n\n```\n\ninclude:\n  - template: Security/Secret-Detection.gitlab-ci.yml\n\nsecret_detection:\n  variables:\n    SECRET_DETECTION_EXCLUDED_PATHS: \"rules,.gitlab,README.md,LICENSE\"\n    SECRET_DETECTION_HISTORIC_SCAN: \"true\"\n```\n\n\n**Step 2: Create a custom ruleset file**\n\n\nCreate the directory and file `rules/pii-data-extension.toml`, which contains\nthe regex patterns for PII data along with an allowlist of patterns to\nignore. Below are patterns to detect passport numbers (USA), phone numbers\n(USA), and email addresses:\n\n\n```toml\n\n[extend]\n\n# Extends default packaged ruleset, NOTE: do not change the path.\n\npath = \"/gitleaks.toml\"\n\n\n# Patterns to ignore (used for tests)\n\n[allowlist]\n\ndescription = \"allowlist of patterns and paths to ignore in detection\"\n\nregexTarget = \"match\"\n\nregexes = ['''555-555-5555''', '''user@example.com''']\n\npaths = ['''(.*?)(jpg|gif|doc|pdf|bin|svg|socket)''']\n\n\n# US Passport Number (USA)\n\n[[rules]]\n\nid = \"us_passport_detection\"\n\ntitle = \"US Passport Number\"\n\ndescription = \"Detects US passport numbers\"\n\nregex = '''\\b[A-Z]{1,2}[0-9]{6,9}\\b'''\n\nkeywords = [\"passport\"]\n\n\n# Phone Number (USA)\n\n[[rules]]\n\nid = \"us_phone_number_detection_basic\"\n\ntitle = \"US Phone Number\"\n\ndescription = \"Detects US phone numbers in basic format\"\n\nregex = '''\\b\\d{3}-\\d{3}-\\d{4}\\b'''\n\nkeywords = [\"phone\", \"mobile\"]\n\n\n# Email Address\n\n[[rules]]\n\nid = \"email_address\"\n\ntitle = \"Email Address\"\n\ndescription = \"Detects email addresses\"\n\nregex = '''[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}'''\n\nkeywords = [\"email\", \"e-mail\"]\n\n```\n\n**Step 3: Extend Secret 
Detection with the custom ruleset file**\n\n\nCreate a directory and file `.gitlab/secret-detection-ruleset.toml` in the\nroot of your repository. This file allows you to extend the standard\nconfiguration with the PII rules file, and overwrite the severity of the\ndetected vulnerabilities (default severity is `Critical`).\n\n\n```\n\n# Define the pii rules to add to default configuration\n\n[[secrets.passthrough]]\n\ntype = \"file\"\n\ntarget = \"gitleaks.toml\"\n\nvalue = \"rules/pii-data-extension.toml\"\n\n\n# Overwrite Phone Number (USA) PII Severity\n\n[[secrets.ruleset]]\n\n[secrets.ruleset.identifier]\n\ntype = \"gitleaks_rule_id\"\n\nvalue = \"us_phone_number_detection_basic\"\n\n[secrets.ruleset.override]\n\nseverity = \"Medium\"\n\n\n# Overwrite Email Address PII Severity\n\n[[secrets.ruleset]]\n\n[secrets.ruleset.identifier]\n\ntype = \"gitleaks_rule_id\"\n\nvalue = \"email_address\"\n\n[secrets.ruleset.override]\n\nseverity = \"Low\"\n\n```\n\n\n**Step 4: Commit your changes**\n\n\nNow add the changes in the above steps to your project.\n\n\n```\n\ncd /path/to/your/project\n\ngit add .\n\ngit commit -m \"Add PII data ruleset and Secret Scanning\"\n\ngit push\n\n```\n\n\nOnce the code is committed, Secret Detection will run within the default\nbranch.\n\n\n**Step 5: Test detection of PII data**\n\n\nNow that we have configured the Secret Detection scanner, we should perform\na test to see if the scanner is detecting the new custom patterns. 
This can\nbe done by creating a merge request, which adds a new file named\n`customer-data.yaml` with the following:\n\n\n```yaml  \n\ncustomers:  \n  test_user:  \n    phone_number: 555-555-5555  \n    email: user@example.com  \n  justin_case:  \n    phone_number: 512-123-4567  \n    passport_number: A12345678  \n    email: justin_case@example.com  \n  chris_p_bacon: \n    phone_number: 305-123-4567  \n    passport_number: B09876543  \n    email: chris_p_bacon@example.com  \n```\n\n\nThe scanner should now perform the following:\n\n\n* Ignore the `phone_number` and `email` of `test_user` due to patterns being\nin allowlist  \n\n* Detect six potential vulnerabilities due to the information present for\nboth `justin_case` and `chris_p_bacon`\n  * U.S. passport number severity is set to `Critical` (default)  \n  * U.S. phone number severity is set to `Medium` (override)  \n  * Email address severity is set to `Low` (override)\n  * Data from rules override is added to each vulnerability\n\nOnce the [merge\nrequest](https://gitlab.com/gitlab-da/tutorials/security-and-governance/devsecops/secret-scanning/pii-data-ruleset/-/merge_requests/4)\nis submitted, the Secret Detection scanner runs and provides the following\nresults:\n\n\n![Secret Detection finding custom PII data\nMR](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097709/Blog/Content%20Images/Blog/Content%20Images/pii_vulns_aHR0cHM6_1750097709683.png)\n\n\nWhen clicking on a vulnerability, you are presented with detailed\nvulnerability data based on what was configured in your newly set up rules:\n\n\n![Expanded custom PII data\nvulnerability](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097710/Blog/Content%20Images/Blog/Content%20Images/pii_vulns_expanded_aHR0cHM6_1750097709684.png)\n\n\nThis data allows you to determine the validity of the data present and\naddress it accordingly.\n\n\nThere are additional ways to configure custom rulesets. 
For example, rules\ncan be applied remotely to several projects, avoiding the need to duplicate\nthe rules file. See the [Secret Detection Configuration\ndocumentation](https://docs.gitlab.com/user/application_security/secret_detection/pipeline/configure/)\nfor more information.\n\n\n## Common PII types to consider\n\n\nWhen building your custom ruleset, consider including patterns for:\n\n\n* Social Security Numbers  \n\n* Credit card numbers  \n\n* Driver's license numbers  \n\n* Passport numbers  \n\n* Email addresses  \n\n* Phone numbers  \n\n* IP addresses  \n\n* Physical addresses  \n\n* Medical record numbers\n\n\n## Leveraging GitLab Duo Chat for pattern creation\n\n\nCreating effective regex patterns can be challenging. This is where [GitLab\nDuo Chat](https://docs.gitlab.com/user/gitlab_duo_chat/) comes in handy.\nGitLab Duo Chat is an AI-powered assistant that accelerates development with\ncontextual, conversational AI. Chat is available to any user with an\nassigned GitLab Duo seat.\n\n\n### Using GitLab Duo Chat to generate PII regex patterns\n\n\n1. **Access GitLab Duo Chat:** In the upper right corner, select __GitLab\nDuo Chat__. A drawer opens on the right side of your screen.\n\n\n2. **Prompt the chat:** Enter a prompt in the chat input box and press\n__Enter__ or select __Send__:\n\n\n```\n\nI need a regex pattern to detect Canadian Zip Codes in our codebase for\nsecret detection\n\n```\n\n\n3. **Review and refine:** GitLab Duo Chat will suggest a regex pattern that\nyou can review and refine:\n\n\n```\n\nCanadian postal codes follow the format \"A1A 1A1\" (where A is a letter and 1\nis a digit). 
Here's a regex pattern that will detect Canadian postal codes\nin your codebase, accounting for both spaced and non-spaced formats:\n\n[A-Za-z]\\d[A-Za-z][ -]?\\d[A-Za-z]\\d\n\nThis pattern matches:\n\n* A letter, followed by a digit, followed by a letter\n\n* An optional space or hyphen\n\n* A digit, followed by a letter, followed by a digit\n\nIf you need a more strict pattern that only matches uppercase letters (as\nofficial Canadian postal codes use), you can use:\n\n[A-Z]\\d[A-Z][ -]?\\d[A-Z]\\d\n\nFor implementation in a secret detection tool, you might want to add word\nboundaries to avoid partial matches:\n\n\\b[A-Z]\\d[A-Z][ -]?\\d[A-Z]\\d\\b\n\nWould you like more details about implementing this in a specific secret\ndetection system?\n\n```\n\n\n4. **Test the pattern:** Test the pattern against sample data to ensure it\nworks correctly. You can use sites like\n[https://regex101.com/](https://regex101.com/) to validate patterns before\nyou submit them to your ruleset.\n\n\n5. **Add to your ruleset:** Once satisfied, add the pattern to your\n`rules/pii-data-extension.toml` file and perform some tests to validate.\nThis sample [merge\nrequest](https://gitlab.com/gitlab-da/tutorials/security-and-governance/devsecops/secret-scanning/pii-data-ruleset/-/merge_requests/5/diffs)\ncontains a newly added rule for Canadian Zip Codes based on the above.\n\n\nAdditionally, you can use GitLab Duo Chat in:\n\n\n* [The GitLab Web IDE (VS Code in the\ncloud)](https://docs.gitlab.com/user/project/web_ide/)  \n\n* VS Code, with the [GitLab Workflow extension for VS\nCode](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow)  \n\n* JetBrains IDEs, with the [GitLab Duo Plugin for\nJetBrains](https://plugins.jetbrains.com/plugin/22325-gitlab-duo)  \n\n* Visual Studio for Windows, with the [GitLab Extension for Visual\nStudio](https://marketplace.visualstudio.com/items?itemName=GitLab.GitLabExtensionForVisualStudio)\n\n\nIn the future, you’ll be able to 
leverage [GitLab Duo\nWorkflow](https://docs.gitlab.com/user/duo_workflow/) (currently in private\nbeta) to automatically generate and add these patterns to your code base\ndirectly from your IDE. GitLab Duo Workflow is an AI agent, which transforms\nAI from reactive assistant to autonomous contributor, optimizing your\nsoftware development lifecycle. Learn more about [GitLab Duo\nWorkflow](https://about.gitlab.com/blog/meet-gitlab-duo-workflow-the-future-of-ai-driven-development/).\n\n\n## Best practices for PII detection\n\n\n1. **Start small:** Begin with a few critical PII types and expand\ngradually.  \n\n2. **Test thoroughly:** Test your patterns against sample data to avoid\nfalse positives.  \n\n3. **Update regularly:** Review and update your rulesets as new PII\nrequirements emerge.  \n\n4. **Document patterns:** Maintain documentation for your custom regex\npatterns.  \n\n5. **Balance precision:** Make patterns specific enough to avoid false\npositives but flexible enough to catch variations.  \n\n6. **Implement [Secret Push\nProtection](https://about.gitlab.com/blog/prevent-secret-leaks-in-source-code-with-gitlab-secret-push-protection/):**\nPrevent PII data from making it into your repository.  \n\n7. **Set up [Merge Request Approval\nPolicies](https://docs.gitlab.com/user/application_security/policies/merge_request_approval_policies/):**\nRequire approval before merging any possible PII data to your repository.\n\n\nOnce you have set up a PII data ruleset to meet your organization's needs,\nremote rulesets can scan for PII data across multiple repositories without\nthe need to duplicate the rules file. 
Watch this video to learn more:\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/vjJxQz918WE?si=CRdIEodo3ALxVWXO\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n## Handling Secret Detection findings\n\n\nWhen GitLab Secret Detection identifies potential PII in your code:\n\n\n1. **Review the finding:** Assess whether it's a legitimate finding or a\nfalse positive.  \n\n2. **Remediate:** Remove the sensitive data and replace it with environment\nvariables or secrets management.  \n\n3. **[Redact text from\nrepository](https://docs.gitlab.com/user/project/merge_requests/revert_changes/#redact-text-from-repository):**\nPermanently delete sensitive or confidential information that was\naccidentally committed, ensuring it's no longer accessible in your\nrepository's history. \n\n4. **Track progress:** Use GitLab's security dashboard to monitor ongoing\ncompliance.\n\n\n## Get started today\n\n\nGitLab Secret Detection, combined with custom PII rulesets, provides a\npowerful defense against inadvertent exposure of sensitive information. By\nleveraging GitLab Duo Chat to create precise regex patterns, teams can\nefficiently implement comprehensive PII detection across their codebase,\nensuring regulatory compliance and protecting user data.\n\n\nRemember that secret detection is just one component of a comprehensive\nsecurity strategy. 
Combine it with other GitLab security features like\nstatic application security testing, dynamic application security testing,\nand dependency scanning for a more robust security posture.\n\n\nStart implementing these practices today to better protect your users'\npersonal information and maintain the security integrity of your\napplications.\n\n\n> Start [a free trial of GitLab Ultimate and GitLab Duo\n](https://about.gitlab.com/free-trial/)today!\n\n\n## More resources\n\n\nTo learn more about GitLab security and compliance and how we can help\nenhance your AppSec workflows, follow the links below:\n\n\n* [GitLab Security and Compliance\nSolutions](https://about.gitlab.com/solutions/security-compliance/)  \n\n* [GitLab DevSecOps Platform](https://about.gitlab.com/platform/)  \n\n* [GitLab Duo (AI)](https://about.gitlab.com/gitlab-duo/)  \n\n* [GitLab Application Security\ndocumentation](https://docs.gitlab.com/user/application_security/) \n\n* [Secret Detection\ndocumentation](https://docs.gitlab.com/user/application_security/secret_detection/)\n",[693,717,719,9,803],{"slug":1624,"featured":91,"template":698},"enhance-data-security-with-custom-pii-detection-rulesets","content:en-us:blog:enhance-data-security-with-custom-pii-detection-rulesets.yml","Enhance Data Security With Custom Pii Detection Rulesets","en-us/blog/enhance-data-security-with-custom-pii-detection-rulesets.yml","en-us/blog/enhance-data-security-with-custom-pii-detection-rulesets",{"_path":1630,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1631,"content":1637,"config":1644,"_id":1646,"_type":14,"title":1647,"_source":16,"_file":1648,"_stem":1649,"_extension":19},"/en-us/blog/enhanced-migration-from-bitbucket-server-and-bitbucket-cloud-to-gitlab",{"title":1632,"description":1633,"ogTitle":1632,"ogDescription":1633,"noIndex":6,"ogImage":1634,"ogUrl":1635,"ogSiteName":685,"ogType":686,"canonicalUrls":1635,"schema":1636},"Enhanced migration from Bitbucket Server and Bitbucket Cloud to 
GitLab","Learn about performance improvements and more when migrating from Bitbucket Server and Cloud to GitLab.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749668776/Blog/Hero%20Images/julia-craice-faCwTallTC0-unsplash.jpg","https://about.gitlab.com/blog/enhanced-migration-from-bitbucket-server-and-bitbucket-cloud-to-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Enhanced migration from Bitbucket Server and Bitbucket Cloud to GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Magdalena Frankiewicz\"}],\n        \"datePublished\": \"2023-11-30\",\n      }",{"title":1632,"description":1633,"authors":1638,"heroImage":1634,"date":1640,"body":1641,"category":1277,"tags":1642},[1639],"Magdalena Frankiewicz","2023-11-30","_Atlassian is ending support for all Server products in February 2024. Learn more about the [benefits of migrating from Atlassian to GitLab](https://about.gitlab.com/move-to-gitlab-from-atlassian/)._\n\nStarting [from February 15, 2024](https://about.gitlab.com/blog/atlassian-server-ending-move-to-a-single-devsecops-platform/), Atlassian will no longer offer technical support, security updates, or vulnerability fixes for their Server products, including Bitbucket Server.\n\nThrough improvements to our Bitbucket Server and Bitbucket Cloud importers, we've lowered the barrier to switch to GitLab, especially for large Bitbucket projects. We are happy to be able to offer a quick and effortless way to move your data to GitLab!\n\nLet's take a look at some of these improvements.\n\n## Improvements to imports of large projects\n\nGitLab has offered Bitbucket Server and Bitbucket Cloud importers for a long time. 
However, these importers operated sequentially in only one Sidekiq background job, which led to timeouts on imports of larger projects.\n\nTo solve the timeouts problem, we introduced parallel, asynchronous importers that split the work into smaller background jobs. This change was introduced in:\n\n- [GitLab 16.1](https://gitlab.com/gitlab-org/gitlab/-/issues/411534) for the Bitbucket Server importer\n- [GitLab 16.6](https://gitlab.com/gitlab-org/gitlab/-/issues/412614) for the Bitbucket Cloud importer\n\nThis change:\n\n- ensures that the import process doesn’t time out on a single worker\n- spreads the number of calls we make to Bitbucket API, reducing the risk of running into rate limiting\n\nWe also improved error handling so that errors raised on single objects don't stop the whole import from completing.\n\n## More improvements\n\nRefactoring importers to be parallel was a crucial improvement, but not the only one we have made to our importers. We also worked to:\n\n- improve the integrity of imported data\n- extend the types of data that we import\n\nBecause Bitbucket Server and Bitbucket Cloud are separate products and require separate importers, the improvements we introduced differ for each importer. We describe them in the sub-sections below.\n\n### Bitbucket Server importer\n\nIn GitLab 16.5, we [fixed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/131894) a problem when imported merged and closed merge requests had no commit data associated with them, leaving the diffs empty.\n\nIn Gitlab 16.3, we began [importing reviewers](https://gitlab.com/gitlab-org/gitlab/-/issues/416611) and in Gitlab 16.6, we began importing [pull request approvals](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135256).\n\n### Bitbucket Cloud importer\n\nIn GitLab 16.6, we fixed a problem that users encountered when a pull request on Bitbucket Cloud was squashed and merged, and the branch deleted. 
When these pull requests were imported to GitLab, the resulting merge requests didn't have associated commits. The problem was addressed by associating merge commits to imported merge requests.\n\nNotes on issues and pull requests can contain references (links) to code, issues, comments, pull requests, and more. Previously, these were imported as is, which left comments with strangely formatted, unclickable links. We [fixed this](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/131382) in GitLab 16.6 by converting refs to GitLab refs. Also, we [no longer import deleted notes](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/133208), which caused data errors.\n\nAlso for the Bitbucket Cloud importer, we began [importing LFS objects](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/133182) in GitLab 16.5 and [pull request reviewers](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/131134) in GitLab 16.6.\n\n## Take advantage of importers today\n\nWith the improvements described, the experience of switching from Bitbucket Server or Bitbucket Cloud to the GitLab DevSecOps platform is better than ever! Check out the [Bitbucket Server importer documentation](https://docs.gitlab.com/ee/user/project/import/bitbucket_server.html) or the [Bitbucket Cloud importer documentation](https://docs.gitlab.com/ee/user/project/import/bitbucket.html) to get started today.\n\nFor GitLab self-managed instances, to benefit from parallel Bitbucket Cloud importer, administrators must enable the `bitbucket_parallel_importer` [feature flag](https://docs.gitlab.com/ee/administration/feature_flags.html). 
The Bitbucket Server importer is always parallel on GitLab self-managed and GitLab.com.",[9,1643,233],"DevOps platform",{"slug":1645,"featured":6,"template":698},"enhanced-migration-from-bitbucket-server-and-bitbucket-cloud-to-gitlab","content:en-us:blog:enhanced-migration-from-bitbucket-server-and-bitbucket-cloud-to-gitlab.yml","Enhanced Migration From Bitbucket Server And Bitbucket Cloud To Gitlab","en-us/blog/enhanced-migration-from-bitbucket-server-and-bitbucket-cloud-to-gitlab.yml","en-us/blog/enhanced-migration-from-bitbucket-server-and-bitbucket-cloud-to-gitlab",{"_path":1651,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1652,"content":1658,"config":1663,"_id":1665,"_type":14,"title":1666,"_source":16,"_file":1667,"_stem":1668,"_extension":19},"/en-us/blog/enhancing-gitlab-with-improved-data-filtering-and-visualizations",{"title":1653,"description":1654,"ogTitle":1653,"ogDescription":1654,"noIndex":6,"ogImage":1655,"ogUrl":1656,"ogSiteName":685,"ogType":686,"canonicalUrls":1656,"schema":1657},"Enhancing GitLab with improved data filtering and visualizations","Discover how GitLab's new data views will streamline your workflow and power decision-making.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099097/Blog/Hero%20Images/Blog/Hero%20Images/agile_agile.png_1750099097133.png","https://about.gitlab.com/blog/enhancing-gitlab-with-improved-data-filtering-and-visualizations","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Enhancing GitLab with improved data filtering and visualizations\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Amanda Rueda\"}],\n        \"datePublished\": \"2024-03-05\",\n      }",{"title":1653,"description":1654,"authors":1659,"heroImage":1655,"date":1660,"body":1661,"category":1053,"tags":1662},[1050],"2024-03-05","In the hectic world of product management, quick access, refinement, and visualization of data are 
essential to drive efficiency and productivity forward. Our recent exploration into the needs of product managers uncovered a vital link between their success and their ability to make data-driven decisions. Conversely, the drain on time and development resources emerged as a significant deterrent to high performance.\n\nRecognizing the critical role of data in strategic decision-making and prioritization, we're excited to announce an upcoming initiative, [Improved Data Filtering and Visualization](https://gitlab.com/groups/gitlab-org/-/epics/5516). This initiative focuses on the usability, flexibility, and efficiency of GitLab's planning views, simplifying how users interact with, recall, and share project data.\n\n## Streamlining data interaction and retrieval\n\nWe're focusing on consolidating multiple views into a unified platform, eliminating confusing navigation, and enabling users to create complex queries in a user-friendly manner. This approach not only makes data more accessible but also empowers users to visualize it in formats that best suit their needs. By providing a single hub for data access and presentation, we're making it easier for users to obtain critical information they need to make more informed, data-driven decisions quickly, leading to streamlined workflows and elevated project outcomes.\n\n### The evolution from multiple views to a single, configurable experience\n\nHere is what it looks like when you evolve from multiple views to a single, configurable experience:\n\n![Image of moving from multiple views to a single view](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099112/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750099112252.png)\n\n## Transforming workflows with real-world applications\n\nImagine a scenario where you must review the progress of multiple interconnected projects. Traditionally, this would involve navigating through various parts of GitLab, each with its own set of filters and views. 
With our Improved Data Filtering and Visualization initiative, you will now access a consolidated view that allows for creating intuitive queries. This new view can display issues and epics in a nested format, providing the hierarchical context you need to understand your project structure fully. What's more, you will have the ability to easily switch to a roadmap or board view as your use case demands.\n\nAnother example involves a development team planning their upcoming sprint. Instead of juggling between different pages for issues, epics, and boards, the team can utilize a single, customized view that allows them to view the context of related work items, update the status of work without opening multiple tabs, and understand work item dependencies. This saves precious synchronous time for the team and creates an efficient workflow by bringing visibility teams need to the forefront.\n\n## Engage with us!\n\nAs we venture into this transformative initiative, your insights and feedback will become the backbone of our progress. We're not merely enhancing features; we're on a mission to revolutionize how product planning can be successful within GitLab. Your insights will help turn this ambitious vision into reality.\n\nDelve into our proposed feature iterations [detailed within our initiative](https://gitlab.com/groups/gitlab-org/-/epics/5516#feature-iterations) and leave your comments on the epic. Your perspective on these enhancements is invaluable, helping us refine our approach and ensure it aligns with your needs and expectations.\n\nThis is more than a call to action — it's an invitation to shape the future of GitLab together. 
Share your feedback, suggestions, and visions with us!\n\n## Leverage Agile planning with GitLab\n\nBuilding on our commitment to streamlining workflows with the Improved Data Filtering and Visualization initiative, it's worth highlighting that GitLab also deeply integrates Agile delivery principles to enhance software development lifecycles. Discover how GitLab bridges strategy with execution, enabling teams to adopt Agile methodologies like Scrum and Kanban, and scale with frameworks such as [SAFe](https://scaledagileframework.com/) and [LeSS](https://less.works/less/framework).\n\n> Explore more about [enhancing Agile Delivery with GitLab](https://about.gitlab.com/solutions/agile-delivery/) and drive faster value creation.\n",[908,9,1319,717],{"slug":1664,"featured":6,"template":698},"enhancing-gitlab-with-improved-data-filtering-and-visualizations","content:en-us:blog:enhancing-gitlab-with-improved-data-filtering-and-visualizations.yml","Enhancing Gitlab With Improved Data Filtering And Visualizations","en-us/blog/enhancing-gitlab-with-improved-data-filtering-and-visualizations.yml","en-us/blog/enhancing-gitlab-with-improved-data-filtering-and-visualizations",{"_path":1670,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1671,"content":1677,"config":1683,"_id":1685,"_type":14,"title":1686,"_source":16,"_file":1687,"_stem":1688,"_extension":19},"/en-us/blog/expanding-guest-capabilities-in-gitlab-ultimate",{"title":1672,"description":1673,"ogTitle":1672,"ogDescription":1673,"noIndex":6,"ogImage":1674,"ogUrl":1675,"ogSiteName":685,"ogType":686,"canonicalUrls":1675,"schema":1676},"The feature you wanted - Expanded Guest capabilities in GitLab Ultimate","GitLab Ultimate customers can now provide Guests the ability to view code. 
Learn how to access this new capability.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749682652/Blog/Hero%20Images/iterating-cover.jpg","https://about.gitlab.com/blog/expanding-guest-capabilities-in-gitlab-ultimate","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"The feature you wanted - Expanded Guest capabilities in GitLab Ultimate\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Hannah Sutor\"}],\n        \"datePublished\": \"2023-03-08\",\n      }",{"title":1672,"description":1673,"authors":1678,"heroImage":1674,"date":1680,"body":1681,"category":928,"tags":1682},[1679],"Hannah Sutor","2023-03-08","\n\n[Customizable roles](https://docs.gitlab.com/ee/user/permissions.html) have been on GitLab's roadmap for the past two years. When we began working on them a year ago, our team struggled to find the [minimal viable change](https://handbook.gitlab.com/handbook/product/product-principles/#the-minimal-viable-change-mvc) (MVC) that would benefit customers. At the same time, through different feedback channels, customers were telling us they wanted more from their Ultimate tier Guest user roles. There it was: our MVC!\n\nHere is what happened next.\n\n## Our MVC journey\n\nWhen we began working on customizable roles, we knew that the six static, out-of-the-box roles that come with GitLab were not flexible enough to cover the use cases of our customers. Some roles were too permissive, while others didn’t grant the permissions necessary to accomplish a task. At a time when security and abiding by [the principle of least privilege](https://www.techtarget.com/searchsecurity/definition/principle-of-least-privilege-POLP) is more top of mind than ever, we needed to give our customers a way to define their own roles.\n\nThe customer ask was clear, but the implementation path was not. Performance considerations were top of mind. 
Permission policies are evaluated when any user action is performed, and we need a secure but scalable way for thousands of users, who may have hundreds of custom roles created, to do their work in GitLab. The team did a lot of technical discovery and performance testing in order to ensure the chosen technical implementation was scalable.\n\nWe decided to start with a very small implementation of custom roles - something that would be meaningful to customers, while also allowing our team to test the new backend implementation that will support custom role creation and usage.\n\n## How custom roles work\n\nFor our MVC, we decided that GitLab.com customers with an Ultimate license should be able to create a custom role that is based on the current “Guest” role. They will be able to add one additional permission to the “Guest” role - the ability to view code. This effectively creates a “Guest+1” role. They can then assign this custom role to any existing user.\n\nPreviously, Guests were able to view code on Self-Managed GitLab, and only on internal or public projects. Now, this functionality is available to Guests across the board - in GitLab.com and Self-Managed GitLab, and regardless of project visibility setting. You just need to create and apply the custom Guest role to any user who wishes to view code.\n\nYou can read more about how to [implement this yourself](https://docs.gitlab.com/ee/user/permissions.html#custom-roles) and watch a demo [here](https://about.gitlab.com/releases/2023/02/22/gitlab-15-9-released/#users-with-the-guest-role-can-view-private-repositories).\n\n## Create a custom role\n\nUse the API to create the “Guest+1” custom role. This role will show up as \"Guest - custom\" in the UI, so that it's easy to see which users have this version of the \"Guest\" role assigned.\n\nOnce the custom role is created, you can [use the API](https://docs.gitlab.com/ee/user/permissions.html#custom-roles) to associate it to a list of users. Voila! 
Now, your users have a custom role that allows them to view code as a Guest.\n\n![customizable guest role](https://about.gitlab.com/images/blogimages/iterating-towards-customizable-roles/guest-custom-role.png){: .shadow}\n\n## Why this MVC?\n\nSometimes, something is so loud that you’re forced to listen to it. That’s undoubtedly how I felt when I heard the dissatisfaction of our Ultimate customers around Guest users in private projects.\n\nAn unlimited number of Guest users are free with a GitLab Ultimate subscription. However, if the Guest user doesn’t have enough access to really do much within the product, is it really of any value at all? Customers left us a lot of feedback that the low level of privilege the Guest users have for private projects was detrimental to their users' workflows - making those “free” users not actually useful at all. We knew it was time to deliver more value.\n\n## What’s next\n\nOur final vision for customizable roles in GitLab is for our users to be able to take what exists today in our [permissions table](https://docs.gitlab.com/ee/user/permissions.html) and toggle each permission off/on as they please to define a custom role.\n\nWe plan to start on this by [consolidating](https://gitlab.com/groups/gitlab-org/-/epics/8914) some of these permissions - both for practical and performance reasons. As you can imagine, some permissions don’t make sense to be toggled “on” if a different feature is “off.\" We will be removing the need for complex logic by consolidating permissions into larger sets that make sense to be enabled/disabled at the same time. 
This should also translate nicely on the usability side - permutations of 100+ individual permissions would be unwieldy to manage, as a systems administrator, and difficult to understand your role definition, as an end user.\n\nThis update to custom roles is a great example of our iteration value here at GitLab, and I’m most excited about the fact that it’s solving an acute pain point for our Ultimate customers. They deserve to get a lot of value out of their Ultimate subscription, and I am hopeful that an additional permission for Guest users is one way we can increase their value. It’s also a great first step towards our grand customizable roles vision. I hope you’ll give it a try!\n\n**Check out this demo that shows the customizable guest role in action:**\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/46cp_-Rtxps\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n",[695,1319,9],{"slug":1684,"featured":6,"template":698},"expanding-guest-capabilities-in-gitlab-ultimate","content:en-us:blog:expanding-guest-capabilities-in-gitlab-ultimate.yml","Expanding Guest Capabilities In Gitlab Ultimate","en-us/blog/expanding-guest-capabilities-in-gitlab-ultimate.yml","en-us/blog/expanding-guest-capabilities-in-gitlab-ultimate",{"_path":1690,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1691,"content":1697,"config":1704,"_id":1706,"_type":14,"title":1707,"_source":16,"_file":1708,"_stem":1709,"_extension":19},"/en-us/blog/finserv-how-to-implement-gitlabs-separation-of-duties-features",{"title":1692,"description":1693,"ogTitle":1692,"ogDescription":1693,"noIndex":6,"ogImage":1694,"ogUrl":1695,"ogSiteName":685,"ogType":686,"canonicalUrls":1695,"schema":1696},"FinServ: How to implement GitLab's separation of duties features","Learn how GitLab ensures secure, compliant software development with separation of duties in the financial services sector, 
including features that help adhere to regulatory frameworks.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097688/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%286%29_6vL96ttKF8zJLLqfPpvFs_1750097687913.png","https://about.gitlab.com/blog/finserv-how-to-implement-gitlabs-separation-of-duties-features","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"FinServ: How to implement GitLab's separation of duties features\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Cherry Han\"},{\"@type\":\"Person\",\"name\":\"Gavin Peltz\"}],\n        \"datePublished\": \"2024-08-13\",\n      }",{"title":1692,"description":1693,"authors":1698,"heroImage":1694,"date":1701,"body":1702,"category":693,"tags":1703},[1699,1700],"Cherry Han","Gavin Peltz","2024-08-13","Throughout software development, robust security and compliance measures are required, especially in industries like financial services where data integrity and regulatory adherence are non-negotiable. One critical aspect of maintaining these standards is separation of duties (SoD). SoD ensures that no individual has complete control over a process from beginning to end, thereby reducing the risk of errors and unauthorized activities. SoD mitigates software supply chain risks by preventing external and malicious acts that could compromise the integrity of the software development process.\n\n## Importance of SoD in the financial services industry\n\nIn the financial services sector, SoD plays a pivotal role in safeguarding sensitive information and upholding regulatory compliance. 
Here’s how SoD contributes strategically to the industry:\n\n* **Risk mitigation:** By distributing responsibilities across different roles, SoD reduces the risk of errors, fraud, and unauthorized activities that could compromise system integrity or regulatory compliance.\n* **Enhanced accountability:** Clear division of duties guarantees that no individual can independently initiate, authorize, and execute a process from start to finish. This promotes transparency and accountability, which are crucial for maintaining trust with stakeholders and regulatory bodies.\n* **Regulatory compliance:** SoD is mandated by financial regulations so that sensitive operations are conducted with oversight and scrutiny. Compliance with these standards not only avoids penalties, but also protects the organization's reputation.\n* **Operational resilience:** By decentralizing decision-making and execution, organizations become less susceptible to disruptions caused by human errors, malicious actions, or unexpected events.\n\n## GitLab for SoD and best practices\nGitLab provides end-to-end separation of duties covering the DevSecOps workflow.\n\n![FinServ SOD - image 1](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097695/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097695697.png)\n\nThe diagram above illustrates the integration of key elements like merge request approval policies, protected features, user permissions, compliance frameworks, and audit events, all working together to uphold the principles of SoD. Each of these components is detailed in the sections below, demonstrating how to establish a secure and compliant development environment.\n\n### Merge request approval policies\n\nOne challenge the financial services industry faces is the implementation of approval mechanisms that prevent unauthorized or unchecked changes from being integrated. 
This is where [merge request approval policies](https://docs.gitlab.com/ee/user/application_security/policies/scan-result-policies.html) come into play. These policies enforce the separation of duties between security and development, preventing individual developers from approving their own code changes if they contain vulnerabilities, and development teams from deploying their code directly to production environments without appropriate oversight. \n\nWhen creating a policy, it’s advisable to consider who would be an appropriate approver. This can be defined as an individual user, group such as the Application Security team, or role type such as a Maintainer. To implement further restrictions, please consider these key policy features:\n\n- Prevent approval by author: This policy puts a guardrail in place so that a merge request author cannot approve their own changes. By requiring independent review, this policy helps maintain objectivity and impartiality in the approval process.\n\n- Prevent approvals by users who add commits: Users who have added commits to a merge request are also prevented from approving it. This further enforces the principle of independent review so changes are scrutinized by team members who are not directly involved in the modifications.\n\n- Prevent editing approval rules: To maintain the integrity of the approval process, GitLab allows administrators to prevent editing approval rules at the project or merge request level. This guarantees that once approval policies are defined, they cannot be bypassed or altered by unauthorized users.\n\n- Require user password to approve: For an added layer of security, GitLab can require users to enter their password to approve a merge request. 
\n\nTo maintain a clear separation of duties, it is advisable to [create a separate top-level group](https://docs.gitlab.com/ee/user/application_security/policies/#enforce-policies-globally-in-gitlab-dedicated-or-your-gitlab-self-managed-instance) dedicated to housing your security policies, including merge request approval policies. This setup minimizes the number of users who inherit permissions and enforces tighter control over policy management. From this separate group, you can [link security policy projects](https://docs.gitlab.com/ee/user/application_security/policies/#link-to-a-security-policy-project) at the highest group level that aligns with your objectives, reducing policy management overhead and providing comprehensive coverage across your development environment.\n\nIt's also important to note that when a policy is enabled by default, it applies to all projects within the associated linked groups, subgroups, and individual projects. If you want to enforce policies more selectively, GitLab recommends you scope your policies to a [compliance framework label](https://docs.gitlab.com/ee/user/group/compliance_frameworks.html). Commonly, our highly regulated customers will architect compliance labels that correspond with regulatory requirements, like “SOX” and “PCI.\" This link to a framework also enables the [native compliance center](https://docs.gitlab.com/ee/user/compliance/compliance_center/) to manage security policies tailored to various use cases.\n\n### Compliance frameworks and controls\n\nCustomers in regulated industries face significant challenges in maintaining compliance in large organizations. Manual processes are prone to errors, and maintaining consistent enforcement of policies across teams can be difficult.\n\nBy using GitLab's compliance frameworks, organizations can automate and administer preventive measures, systematically manage risks, and enforce regulatory compliance seamlessly. 
These frameworks can enforce security protocols and custom jobs across any pipeline. \n\nTo safeguard compliance settings at the organizational level, GitLab allows only group or project owners to add or remove compliance frameworks. This measure blocks development teams or managers from altering compliance configurations without appropriate permission levels, providing an additional layer of security. It’s important to note that if an individual with Maintainer permission is allowed to create a subgroup, they become the owners of that subgroup and can change the compliance framework. This can be prevented by [limiting who can create subgroups](https://docs.gitlab.com/ee/user/group/subgroups/#change-who-can-create-subgroups) under permissions and group settings.\n\n## SoD through permissions and roles\n\nTo effectively enforce the separation of duties in the financial services industry, it's essential to establish clear and precise access control. GitLab provides a tiered [permissions model](https://docs.gitlab.com/ee/user/permissions.html) with predefined roles such as Guest, Reporter, Developer, Maintainer, and Owner. Each role has a specific set of permissions so individuals can perform their duties without overstepping boundaries, which could lead to conflicts of interest or security risks. GitLab recommends assigning roles following the [principle of least privilege access](https://about.gitlab.com/blog/the-ultimate-guide-to-least-privilege-access-with-gitlab/).\n\nFor organizations with granular needs, particularly those using GitLab Ultimate, [custom roles](https://docs.gitlab.com/ee/user/custom_roles.html) offer even greater flexibility. These roles allow organizations to define specific permissions tailored to their unique workflows and compliance requirements. 
This is particularly useful in enforcing the separation of duties because no individual can perform conflicting tasks.\n\nA common use case is the need for a deployer role — individuals who need to deploy jobs but should not have access to edit or push code. GitLab addresses this requirement through the use of [protected environments](https://docs.gitlab.com/ee/ci/environments/protected_environments.html#protecting-environments). Protected environments allow you to [invite groups approved to deploy jobs](https://docs.gitlab.com/ee/ci/environments/protected_environments.html#deployment-only-access-to-protected-environments) while limiting the role of users to Reporters. Please note that the deployment job should include the environment keyword. This configuration enables users to deploy jobs without the ability to edit the code, ensuring compliance requirements are met. \n\nBy carefully defining and enforcing roles and permissions, organizations can create a secure and compliant development environment. If you’d like to review your user permissions on a broader scale, you can use this [Group Member report](https://gitlab.com/gitlab-com/cs-tools/gitlab-cs-tools/gitlab-group-member-report) to see how many members of a role are in your environment and evaluate the next steps accordingly.\n\n## Protected features\nGitLab offers several “protected” features to enforce additional layers of control over your development process. These features can be vital for maintaining SoD so that only designated individuals can make significant changes.\n\n- Protected branches: A protected branch restricts who can push, merge, or force push to the branch. This is particularly beneficial for branches like “main” or “production,\" so that only authorized users can make modifications.\n- Protected Git tags: These tags allow control over who has permission to create tags. 
This prevents accidental updates or deletions once the tag is created, preserving the integrity of your versioning.\n- Protected environments: Protecting specific environments, especially productions, from unauthorized access is imperative. In a protected environment, only users with the appropriate privileges can deploy to it, safeguarding the environment from unintended changes. This ties back to the deployer role functionality mentioned earlier, where individuals can deploy jobs without editing the code, establishing compliance and security.\n- Protected packages: Using package protection rules restricts which users can make changes to your packages. \nThese protected features collectively help maintain a secure and compliant development environment that aligns with the principles of SoD.\n\n## Audit event and compliance center\nHaving discussed approval policies, compliance frameworks, roles, and protected features, the final step is how GitLab allows you to monitor and audit these implementations to guarantee adherence. GitLab's [audit events](https://docs.gitlab.com/ee/user/compliance/audit_events.html) provide a detailed record of activities and changes, such as user activity and project modifications, for owners and admins. This logging is vital for tracking user actions and detecting unauthorized behavior. [Audit event streaming](https://docs.gitlab.com/ee/user/compliance/audit_event_streaming.html) enhances this by allowing organizations to stream audit events to external systems for real-time analysis and alerting. By doing so, any alterations or violations are detected, allowing swift remediation.\n\nThe [Compliance Center in GitLab](https://docs.gitlab.com/ee/user/compliance/compliance_center/) is a centralized hub for managing and monitoring compliance activities. It provides an overview of compliance status across projects and groups, highlighting violations of merge request approval rules or other policies. 
Administrators can promptly address issues, certifying adherence to predefined compliance standards. This centralized approach simplifies compliance management, maintaining a high level of oversight and control.\n\n> If you are interested in learning more about GitLab’s thoughts on SoD and compliance, check out the  [GitLab Govern product direction](https://about.gitlab.com/direction/govern/) and the [GitLab compliance documentation](https://docs.gitlab.com/ee/administration/compliance.html). \n\n## Read more\n\n- [Meet regulatory standards with GitLab compliance & security policy management](https://about.gitlab.com/blog/meet-regulatory-standards-with-gitlab/)\n- [Building GitLab with GitLab: Expanding our security certification portfolio](https://about.gitlab.com/blog/building-gitlab-with-gitlab-expanding-our-security-certification-portfolio/)\n- [Online retailer bol tackles growing compliance needs with GitLab](https://about.gitlab.com/blog/online-retailer-bol-tackles-growing-compliance-needs-with-gitlab/)",[693,9,717,570],{"slug":1705,"featured":6,"template":698},"finserv-how-to-implement-gitlabs-separation-of-duties-features","content:en-us:blog:finserv-how-to-implement-gitlabs-separation-of-duties-features.yml","Finserv How To Implement Gitlabs Separation Of Duties Features","en-us/blog/finserv-how-to-implement-gitlabs-separation-of-duties-features.yml","en-us/blog/finserv-how-to-implement-gitlabs-separation-of-duties-features",{"_path":1711,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1712,"content":1718,"config":1724,"_id":1726,"_type":14,"title":1727,"_source":16,"_file":1728,"_stem":1729,"_extension":19},"/en-us/blog/five-reasons-to-simplify-agile-planning-tool-configuration-gitlab",{"title":1713,"description":1714,"ogTitle":1713,"ogDescription":1714,"noIndex":6,"ogImage":1715,"ogUrl":1716,"ogSiteName":685,"ogType":686,"canonicalUrls":1716,"schema":1717},"5 reasons to simplify your agile planning tool configuration with GitLab","Get more from your 
agile planning with a platform that supports automation, protects the user experience, and reduces time spent on tool configurations.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749665084/Blog/Hero%20Images/agile.png","https://about.gitlab.com/blog/five-reasons-to-simplify-agile-planning-tool-configuration-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"5 reasons to simplify your agile planning tool configuration with GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Melissa Ushakov\"}],\n        \"datePublished\": \"2023-10-17\",\n      }",{"title":1713,"description":1714,"authors":1719,"heroImage":1715,"date":1721,"body":1722,"category":1053,"tags":1723},[1720],"Melissa Ushakov","2023-10-17","\nAgile planning tools offer an opportunity to guide team members through a workflow and to follow best practices. In some agile planning tools, like Atlassian Jira, this is done by configuring enforced status workflows for agile planning work items. However, enforced status workflows can slow response times, impact team autonomy, and have other negative effects. Instead, let's explore how a single DevSecOps platform with built-in agile tooling can simplify the process — and how GitLab can help.\n\n## What are enforced status workflows?\nEnforced status workflows define the statuses a work item can be in, the allowed status transitions, the data elements needed to perform a status transition, and which users can perform a status transition. In a typical Jira-user scenario, developers have to manually fill in required data and advance an item through a prescribed status workflow that may not account for exceptions. 
Enforced workflows in an agile planning tool can limit creativity and unnecessarily block progress when exceptions are encountered, hindering the team's ability to respond swiftly to changing priorities and limiting their autonomy in delivering the best business outcomes.\n\n![status workflows](https://about.gitlab.com/images/blogimages/2023-09-26-say-no-to-complex-status-enforcements-in-your-agile-planning-tool/status_workflow_example.png){: .shadow.small.center}\nExample of an enforced workflow preventing certain status transitions\n{: .note.text-center}\n\n\n## Guiding users through a workflow with GitLab\nHere are five ways GitLab simplifies your agile planning tool configuration.\n\n### 1. Statuses are enforced close to the work for better efficiency.\nCompliance and security checks are necessary, and organizations should ensure that work goes through the proper reviews and that the right team members are involved. Agile planning work items are an artifact that helps align the scope of work of a deliverable, and the work to make that scope a reality happens in downstream in merge requests. Many times, enforcements are better suited to be applied through [approval rules](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/rules.html), [compliance pipelines](https://docs.gitlab.com/ee/user/group/compliance_frameworks.html#compliance-pipelines), or [deployment approvals](https://docs.gitlab.com/ee/ci/environments/deployment_approvals.html). This leads to a greater assurance that the code changes have followed the right process, which matters most. For example, instead of having an enforced status transition in your work items for a security review, you can create [merge request approval rules](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/rules.html), setting the members of your security team as approvers. 
[Deployment approvals](https://docs.gitlab.com/ee/ci/environments/deployment_approvals.html) can be set up so that your quality assurance lead needs to approve the deployment to specific environments.\n\n![status workflows](https://about.gitlab.com/images/blogimages/2023-09-26-say-no-to-complex-status-enforcements-in-your-agile-planning-tool/unified_approval_rules_v16_0.png){: .shadow.small.center}\nThe production environment requires five total approvals, and allows deployments from only the \"Very Important Group\" and the user administrator.\n{: .note.text-center}\n\n\n### 2. Automation improves data quality.\nThis may seem counterintuitive, but when you place too many restrictions on what data entry is allowed, people stop updating their work items, leading to poor data quality. Working through exceptions to the process is cumbersome, and users need to misreport activity to close the work item when work is done. For example, it's common for users in a tool with workflow enforcements to quickly advance an issue through statuses that didn’t happen so that it could be closed when work has been completed. What if you could intelligently derive the status of work instead of relying on people updating issues? With [GitLab triage](https://gitlab.com/gitlab-org/ruby/gems/gitlab-triage), automation can add or remove labels, set a health status based on downstream DevOps activity, and more. 
Your developers can spend more time on product innovation and driving your business objectives forward instead of manually updating details, which automation can handle.\n\n![bot labels](https://about.gitlab.com/images/blogimages/2023-09-26-say-no-to-complex-status-enforcements-in-your-agile-planning-tool/gitlab_bot_health.png){: .shadow.medium.center}\nUpdate health with automation\n{: .note.text-center}\n\n\n![bot health](https://about.gitlab.com/images/blogimages/2023-09-26-say-no-to-complex-status-enforcements-in-your-agile-planning-tool/gitlab_bot_labels.png){: .shadow.medium.center}\nUpdate labels with automation\n{: .note.text-center}\n\n\n### 3. The user experience is protected.\nAt GitLab, we prioritize [convention over configuration](https://handbook.gitlab.com/handbook/product/product-principles/#convention-over-configuration) as one of our product principles to deliver exceptional user experiences. This is why GitLab avoids using heavy-handed enforced status workflows in agile planning tools, which end users often dislike. These workflows create friction and hinder your team's productivity. Instead, teams should encourage transparency by explaining the importance of each step in the process, fostering understanding and buy-in. GitLab enables teams to represent a status flow with boards that help to understand the intended flow. A simplified planning tool setup within the DevSecOps platform drives collaboration, transparency, and trust across all roles, leading to a better full-team experience where developers can deliver more value faster.\n\n![workflow board](https://about.gitlab.com/images/blogimages/2023-09-26-say-no-to-complex-status-enforcements-in-your-agile-planning-tool/workflow_board.png){: .shadow.medium.center}\nBoards guide your team through a sequence of statuses\n{: .note.text-center}\n\n\n### 4. 
Developer tool integration is simplified.\nOrganizations often have a rich ecosystem of tools, and the inability to fully integrate with a critical tool like your agile planning solution limits efficiency and flexibility, potentially leading to time-consuming manual processes. When your status process flow is too restrictive, it is almost impossible to update data from an integration accurately, leading to data mismatches between systems. Other vendors, like Atlassian, have resorted to advising against updating data via an integration. At GitLab, we embrace developer tool preferences and have a comprehensive [suite of APIs and webhooks](https://docs.gitlab.com/ee/api/).\n\n### 5. Time spent on tool configuration is reduced.\nAgile planning tools require close management of configurations. The more prescriptive you make workflows, the more time you have to spend tending to those configurations. With GitLab, automated workflow controls require less effort and enable greater team autonomy. 
By embracing GitLab's flexible configuration options, teams can spend less time on agile planning tool management and innovate more efficiently.\n\n## Getting started with GitLab\nWith a simplified agile planning tool setup in GitLab that takes advantages of modern DevSecOps practices, you can increase efficiency, encourage collaboration, and get to market faster – all while improving the developer experience.\n\nGet started today with the [Jira importer](https://docs.gitlab.com/ee/user/project/import/jira.html).\n",[908,9,951,717],{"slug":1725,"featured":6,"template":698},"five-reasons-to-simplify-agile-planning-tool-configuration-gitlab","content:en-us:blog:five-reasons-to-simplify-agile-planning-tool-configuration-gitlab.yml","Five Reasons To Simplify Agile Planning Tool Configuration Gitlab","en-us/blog/five-reasons-to-simplify-agile-planning-tool-configuration-gitlab.yml","en-us/blog/five-reasons-to-simplify-agile-planning-tool-configuration-gitlab",{"_path":1731,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1732,"content":1738,"config":1743,"_id":1745,"_type":14,"title":1746,"_source":16,"_file":1747,"_stem":1748,"_extension":19},"/en-us/blog/get-to-know-the-gitlab-wiki-for-effective-knowledge-management",{"title":1733,"description":1734,"ogTitle":1733,"ogDescription":1734,"noIndex":6,"ogImage":1735,"ogUrl":1736,"ogSiteName":685,"ogType":686,"canonicalUrls":1736,"schema":1737},"Get to know the GitLab Wiki for effective knowledge management","The GitLab Wiki helps organizations benefit from Agile planning and knowledge management. 
Learn best practices for using this powerful tool in your DevSecOps environment.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749660011/Blog/Hero%20Images/blog-image-template-1800x945__21_.png","https://about.gitlab.com/blog/get-to-know-the-gitlab-wiki-for-effective-knowledge-management","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Get to know the GitLab Wiki for effective knowledge management\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Matthew Macfarlane\"}],\n        \"datePublished\": \"2024-07-17\",\n      }",{"title":1733,"description":1734,"authors":1739,"heroImage":1735,"date":1740,"body":1741,"category":1053,"tags":1742},[1559],"2024-07-17","Effective knowledge management is a cornerstone of Agile planning success for organizations across all industries. The [GitLab Wiki](https://docs.gitlab.com/ee/user/project/wiki/), part of the GitLab DevSecOps platform, is a powerful tool that supports this endeavor. With the GitLab Wiki, teams can streamline [Agile planning](https://about.gitlab.com/solutions/agile-delivery/) processes, enhance collaboration, and ensure that valuable information is accessible and up to date, all within a single platform. In this article, you will learn how to harness the GitLab Wiki for effective knowledge management.\n\n## What is the GitLab Wiki?\n\nThe GitLab Wiki is available at both the Project and Group levels, and allows teams to create, organize, and share documentation directly within their GitLab instances. It's a flexible and user-friendly wiki that supports multiple formats, including Markdown, RDoc, AsciiDoc, and Org, making it easy to present information in a readable manner.\n\n## Benefits of using GitLab Wiki for knowledge management\n\nThere are numerous benefits to using the GitLab Wiki for knowledge management as part of an overall Agile planning strategy. Here are four examples:\n\n1. 
**Centralized information repository:** The GitLab Wiki serves as a single source of truth where all knowledge can be stored. This centralization ensures that team members can easily find the information they need without having to leave the platform for an external solution.\n2. **Collaboration and accessibility:** Being integrated into GitLab helps to foster collaboration and ensures that everyone has access to the latest information.\n3. **Version control:** The GitLab Wiki is bolstered by GitLab's robust version control system. This means every change is tracked, and previous versions can be restored if necessary. This is crucial for maintaining the integrity of documentation over time.\n4. **Templates:** Templates ensure that the content across different pages maintains a consistent format and style, making the documentation more professional and easier to navigate. Templates also save time by providing a predefined structure that can be reused. This reduces the effort required to create new pages or update existing ones.\n\n## Best practices for knowledge management in the GitLab Wiki\n\nHere are five best practices to follow when using the GitLab Wiki:\n\n1. **Organize content logically:** Structure your wiki with clear, logical pages and sub-pages. Use categories to group related information and ensure that the hierarchy is intuitive. This makes it easier for users to navigate and find what you need.\n2. **Standardize documentation practices:** Establish and enforce guidelines for documentation, including consistent formatting, naming conventions, and content structure. Templates, as mentioned before, can help with this.\n3. **Perform regular updates and reviews:** Unlike an issue or epic, wiki pages never close. To ensure the accuracy of wiki pages it's important to schedule regular reviews and updates. Encourage team members to contribute updates as they encounter changes in their work.\n4. 
**Encourage collaboration:** Foster a culture where team members are encouraged to contribute to the Wiki. This could be through regular knowledge-sharing sessions, incentives for contributions, or integrating documentation updates into daily workflows.\n5. **Link wiki pages to issues and epics:** Use GitLab’s auto-complete capability to link wiki pages with issues and epics. Linking can help your team better locate and reference information throughout a project's lifecycle.\n\n## Get started with the GitLab Wiki\n\nThe GitLab Wiki helps organizations follow Agile planning principles and attain effective knowledge management by ensuring documentation remains a valuable resource. Leveraging GitLab Wiki for knowledge management not only enhances productivity but also fosters a culture of continuous learning and improvement.\n\nIncorporate these strategies into your GitLab Wiki practice and watch as your team's efficiency and collaboration reach new heights. To learn more about the GitLab Wiki, check out [our documentation wiki](https://docs.gitlab.com/ee/user/project/wiki/).\n\n> Try the GitLab Wiki for yourself with a [free trial of GitLab Ultimate](https://gitlab.com/-/trial_registrations/new?glm_source=about.gitlab.com/blog&glm_content=default-saas-trial).",[908,9,717],{"slug":1744,"featured":6,"template":698},"get-to-know-the-gitlab-wiki-for-effective-knowledge-management","content:en-us:blog:get-to-know-the-gitlab-wiki-for-effective-knowledge-management.yml","Get To Know The Gitlab Wiki For Effective Knowledge 
Management","en-us/blog/get-to-know-the-gitlab-wiki-for-effective-knowledge-management.yml","en-us/blog/get-to-know-the-gitlab-wiki-for-effective-knowledge-management",{"_path":1750,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1751,"content":1757,"config":1761,"_id":1763,"_type":14,"title":1764,"_source":16,"_file":1765,"_stem":1766,"_extension":19},"/en-us/blog/get-to-know-the-security-and-governance-updates-in-gitlab-17-17-1",{"title":1752,"description":1753,"ogTitle":1752,"ogDescription":1753,"noIndex":6,"ogImage":1754,"ogUrl":1755,"ogSiteName":685,"ogType":686,"canonicalUrls":1755,"schema":1756},"Get to know the security and governance updates in GitLab 17, 17.1","Dive deep into the new enhancements that can strengthen your organization's security posture, including how-to videos for SAST, DAST, API security, container registry, and more.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098858/Blog/Hero%20Images/Blog/Hero%20Images/AdobeStock_282096522_securitycompliance.jpeg_1750098857843.jpg","https://about.gitlab.com/blog/get-to-know-the-security-and-governance-updates-in-gitlab-17-17-1","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Get to know the security and governance updates in GitLab 17, 17.1\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Fernando Diaz\"}],\n        \"datePublished\": \"2024-07-17\",\n      }",{"title":1752,"description":1753,"authors":1758,"heroImage":1754,"date":1740,"body":1759,"category":693,"tags":1760},[1599],"With every GitLab release we enhance and optimize security and governance\nsolutions to ensure customers have the tools they need to produce secure and\ncompliant software. Our values of\n[iteration](https://handbook.gitlab.com/handbook/values/#iteration) and\n[results for\ncustomers](https://handbook.gitlab.com/handbook/values/#results) drive our\nrelease cycles, and GitLab 17 is no exception. 
We have been releasing every\nmonth for the past 153 months straight!\n\n\nIn this article, you'll learn my favorite security and governance\nenhancements released in GitLab 17 and 17.1 and how they can benefit your\norganization’s security requirements. \n\n\n- [SAST analyzer streamlining](#sast-analyzer-streamlining)\n\n- [Android dependency scanning](#android-dependency-scanning)\n\n- [Custom roles and granular security permissions\nupdates](#custom-roles-and-granular-security-permissions-updates)\n\n- [Secret detection updates](#secret-detection-updates)\n\n- [Container registry updates](#container-registry-updates)\n\n- [API security scanning updates](#api-security-scanning-updates)\n\n\n## SAST analyzer streamlining\n\n\nGitLab provides static application security testing\n([SAST](https://docs.gitlab.com/ee/user/application_security/sast/)) to\nexamine your source code for known vulnerabilities, detecting\nvulnerabilities such as SQL injections and cross-site scripting. When SAST\nkicks off, the programming language used is auto-detected and the\nappropriate scanner is loaded.\n\n\nIn GitLab 17, SAST scans the same languages, but now with fewer analyzers,\n[offering a simpler and more customizable\nexperience](https://about.gitlab.com/releases/2024/05/16/gitlab-17-0-released/#streamlined-sast-analyzer-coverage-for-more-languages).\nLanguage-specific analyzers have been replaced with GitLab-managed rules in\nthe Semgrep-based analyzer for the following languages:\n\n\n- C/C++\n\n- Swift (iOS)\n\n- Java/Kotlin (Android)\n\n- Node.js\n\n- PHP\n\n- Ruby\n\n\nHaving one analyzer for many different languages makes configurations and\nwriting rules easier than ever. 
See the [supported languages and frameworks\ndocumentation](https://docs.gitlab.com/ee/user/application_security/sast/#supported-languages-and-frameworks)\nfor more information.\n\n\nWatch this video to learn more:\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/_80z6mZmzek?si=i9yPQttxuwVcb7Ye\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n## Android dependency scanning\n\n\nIn modern software development, many applications are built from multiple\ndependencies that are best at performing their intended function. For\nexample, rather than writing a YAML parser, a developer will use a library\nthat parses YAML. This allows developers to focus on the main goal of their\napplication, rather than spending time on utility functions.\n\n\nWhile the use of dependencies speeds up efficiency, they can be difficult to\nmanage and could introduce vulnerabilities to your application. For this,\nGitLab provides [dependency\nscanning](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/),\nwhich analyzes dependencies for known vulnerabilities. \n\n\nMany organizations are using dependencies even when creating native mobile\napplications. In GitLab 17, we introduced [Android dependency\nscanning](https://about.gitlab.com/releases/2024/05/16/gitlab-17-0-released/#dependency-scanning-support-for-android)\nto bridge the gap. 
Android dependency scanning can be easily added as a\n[CI/CD catalog\ncomponent](https://gitlab.com/explore/catalog/components/android-dependency-scanning)\n– just include the following code in your `.gitlab-ci.yml`:\n\n\n```\n\ninclude:\n  - component: gitlab.com/components/android-dependency-scanning/component@1.0.0\n    inputs:\n      stage: test\n```\n\n\nThis job will also generate a CycloneDX software bill of materials\n([SBOM](https://about.gitlab.com/blog/the-ultimate-guide-to-sboms/))\nreport, which may be necessary for compliance. Make sure to scan your\nAndroid dependencies as soon as possible, as there are many CVEs out there.\n\n\nWatch this video to learn more:\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/_80z6mZmzek?si=DdB7j4NAenl-UcrJ\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n> Learn more SBOMs and dependencies with [our ultimate guide to\nSBOMs](https://about.gitlab.com/blog/the-ultimate-guide-to-sboms/).\n\n\n## Custom roles and granular security permissions updates\n\n\nGitLab provides [custom\nroles](https://docs.gitlab.com/ee/user/custom_roles.html) to allow\norganizations to create user roles with the precise privileges and\npermissions to meet their needs. This enables organizations to [implement\nthe principle of least\nprivilege](https://about.gitlab.com/blog/the-ultimate-guide-to-least-privilege-access-with-gitlab/)\nto adhere to various compliance standards.\n\n\n![custom roles\nscreenshot](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098874/Blog/Content%20Images/Blog/Content%20Images/1_aHR0cHM6_1750098873857.png)\n\n\nIn GitLab 17, managing custom roles has become easier than ever. 
You can now\n[edit a custom role and its permissions directly from the\nUI](https://about.gitlab.com/releases/2024/05/16/gitlab-17-0-released/#edit-a-custom-role-and-its-permissions),\nwhereas, in the past, the role needed to be recreated. Also, for those using\nGitLab self-managed, [custom roles are now managed at the instance\nlevel](https://about.gitlab.com/releases/2024/05/16/gitlab-17-0-released/#manage-custom-roles-at-self-managed-instance-level),\nallowing administrators to create the roles, and group owners to assign\nthem.\n\n\nWatch this video to learn more:\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/glvvCoc2hkc?si=dl_SwQ7tyVdzirH5\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\nThere have also been [several UX\nimprovements](https://about.gitlab.com/releases/2024/05/16/gitlab-17-0-released/#ux-improvements-to-custom-roles)\nadded to this feature along with the introduction of the following\npermissions:\n\n\n- assign security policy links\n\n- manage and assign compliance frameworks\n\n- manage webhooks\n\n- manage push rules\n\n- manage merge request settings (17.1)\n\n- manage integrations (17.1)\n\n- manage deploy tokens (17.1)\n\n- read CRM contacts (17.1)\n\n\nGitLab releases usually include new permissions to further enable the\nimplementation of the principle of least privilege. To learn more about the\navailable granular security permissions, [visit the available custom\npermission\ndocumentation](https://docs.gitlab.com/ee/user/custom_roles/abilities.html).\n\n\n## Secret detection updates\n\n\nDevelopers may accidentally commit secrets like keys or API tokens to Git\nrepositories from time to time. After a sensitive value is pushed to a\nremote repository, anyone with access to the repository can impersonate the\nauthorized user of the secret and cause mayhem. 
When this occurs the exposed\nsecrets must be revoked and replaced to address this risk, which can cause\nsystem downtime.\n\n\nGitLab provides [secret\ndetection](https://docs.gitlab.com/ee/user/application_security/secret_detection/)\nto address this risk, and in GitLab 17 it’s gotten even better with the\nfollowing enhancements:\n\n\n- [Support for remote rulesets when overriding or disabling\nrules](https://about.gitlab.com/releases/2024/05/16/gitlab-17-0-released/#secret-detection-now-supports-remote-rulesets-when-overriding-or-disabling-rules):\n- Allows you to override or disable rules via a remote configuration.\nTherefore, you can scale rule configurations across multiple projects using\nonly one [TOML](https://toml.io/en/) file.\n\n- [Advanced vulnerability\ntracking](https://about.gitlab.com/releases/2024/05/16/gitlab-17-0-released/#introducing-advanced-vulnerability-tracking-for-secret-detection):\nDetects when the same secret has moved within a file due to refactoring or\nunrelated changes. This leads to reduced duplicate findings, simplifying\nvulnerability management.\n\n\nIn GitLab 17.1, [secret push\nprotection](https://about.gitlab.com/releases/2024/06/20/gitlab-17-1-released/#secret-push-protection-available-in-beta)\nis now in Beta. Secret push protection checks the content of each commit\npushed to GitLab. If any secrets are detected, the push is blocked and\ndisplays information about the commit. 
Therefore, a developer does not need\nto do the extra work of removing and rotating secrets, since they are never\ncommitted upstream.\n\n\n![Push block due to detected\nsecret](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098874/Blog/Content%20Images/Blog/Content%20Images/2_aHR0cHM6_1750098873858.png)\n\n\nWhen [push protection\noccurs](https://about.gitlab.com/blog/prevent-secret-leaks-in-source-code-with-gitlab-secret-push-protection/),\nyou can see it displays additional information on the commit, including:\n\n\n- the commit ID that contains the secret\n\n- the filename and line number that contains the secret\n\n- the type of secret\n\n\n**Note:**  [Enabling secret push\nprotection](https://docs.gitlab.com/ee/user/application_security/secret_detection/secret_push_protection/#enable-secret-push-protection)\nis as easy as flipping a switch in GitLab Security Configuration.\n\n\nWatch this video to learn more:\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/ZNtwXVj3tA8?si=4xJ1rWdThpVjvebv\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n## Container registry updates\n\n\nGitLab provides a [built-in container\nregistry](https://docs.gitlab.com/ee/user/packages/container_registry/),\nmaking it easy for developers to store and manage container images for each\nGitLab project without context switching. GitLab 17.1 includes several\nfeatures to enhance the security and efficiency of using the registry:\n\n- [Container images linked to\nsignatures](https://about.gitlab.com/releases/2024/06/20/gitlab-17-1-released/#container-images-linked-to-signatures):\nContainer images in the registry can now be signed and associated with the\nsignature. 
This can reduce image tampering by allowing developers to quickly\nfind and validate the signatures that are associated with a container image\n\n- [Display the last published date for container\nimages](https://about.gitlab.com/releases/2024/06/20/gitlab-17-1-released/#display-the-last-published-date-for-container-images):\nThe container registry UI has been updated to include accurate\n`last_published_at timestamps`, putting critical data at the top of view.\n\n- [Sort container registry tags by publish\ndate](https://about.gitlab.com/releases/2024/06/20/gitlab-17-1-released/#sort-container-registry-tags-by-publish-date):\nAllows developers to quickly find and validate the most recently published\ncontainer image.\n\n\n![Signed container\ndetails](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098874/Blog/Content%20Images/Blog/Content%20Images/3_aHR0cHM6_1750098873860.png)\n\n\nAdditionally we’ve introduced [container scanning for the\nregistry](https://about.gitlab.com/releases/2024/06/20/gitlab-17-1-released/#container-scanning-for-registry).\nThe container images being used in your application may themselves be based\non other container images that contain known vulnerabilities. Since\ndevelopers heavily make use of the built-in container registry, it is a\nno-brainer to introduce [container\nscanning](https://docs.gitlab.com/ee/user/application_security/container_scanning/)\nfor the registry.\n\n\n[Container scanning for the\nregistry](https://docs.gitlab.com/ee/user/application_security/container_scanning/#container-scanning-for-registry)\ncan be easily enabled by flipping a switch in GitLab Security Configuration.\nOnce it’s enabled, whenever a container image is pushed to the container\nregistry in your project, GitLab checks its tag. 
If the tag is `latest`,\nthen GitLab creates a new pipeline that scans the image and even produces a\nCycloneDX SBOM.\n\n\n**Note:** At the moment, a vulnerability scan is only performed when a new\nadvisory is published. We are working to detect all vulnerabilities in the\nregistry itself in future iterations.\n\n\nWatch this video to learn more:\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/Zuk7Axs-CRw?si=odlgT5HWv_KOnBtq\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n## API security scanning updates\n\n\nWhile SAST does a great job of finding vulnerabilities in static source\ncode, there can still be vulnerabilities present in the running application\nthat cannot be detected in source code, such as broken authentication and\nsecurity misconfigurations. For these reasons, GitLab provides dynamic\napplication security testing\n([DAST](https://docs.gitlab.com/ee/user/application_security/dast/)) and\n[Web API\nfuzzing](https://docs.gitlab.com/ee/user/application_security/api_fuzzing/index.html)\nto help discover bugs and potential security issues that other QA processes\nmay miss. 
\n\n\nIn GitLab 17, we’ve introduced [several\nenhancements](https://about.gitlab.com/releases/2024/05/16/gitlab-17-0-released/#api-security-testing-analyzer-updates)\nto our [dynamic scanners which target Web\nAPIs](https://docs.gitlab.com/ee/user/application_security/api_security_testing/index.html),\nincluding:\n\n- system environment variables are now passed from the CI runner to the\ncustom Python scripts used for certain advanced scenarios (like request\nsigning)\n\n- API Security containers now run as a non-root user, which improves\nflexibility and compliance\n\n- support for servers that only offer TLSv1.3 ciphers, which enables more\ncustomers to adopt API security testing.\n\n- scanner image upgraded to Alpine 3.19, which addresses security\nvulnerabilities\n\n\nIn GitLab 17.1, additional configuration variables were added to [API\nsecurity\nscanning](https://about.gitlab.com/releases/2024/06/20/gitlab-17-1-released/#api-security-testing-analyzer-updates)\nand [API\nfuzzing](https://about.gitlab.com/releases/2024/06/20/gitlab-17-1-released/#fuzz-testing-analyzer-updates)\nto allow:\n\n- creation of a comma-separated list of HTTP success status codes that\ndefine whether the job has passed\n\n- disabling of waiting for the target API to become available before\nscanning begins\n\n- specifying the expected status code for the API target availability check\n\n\nWatch this video to learn more:\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/CcyOoBgSPUU?si=hAMQfmUTlLRKhPSg\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n## Learn more about other enhancements\n\n\nGitLab 17 and 17.1 also introduced several other security and governance\nfeatures and enhancements, too many to cover in this blog. 
Some of these\nfeatures include:\n\n\n- [Updated filtering on the Vulnerability\nReport](https://about.gitlab.com/releases/2024/05/16/gitlab-17-0-released/#updated-filtering-on-the-vulnerability-report):\nYou can now use the filtered search component to filter the Vulnerability\nReport by any combination of status, severity, tool, or activity.\n\n- [Toggle merge request approval policies to fail open or fail\nclosed](https://about.gitlab.com/releases/2024/05/16/gitlab-17-0-released/#toggle-merge-request-approval-policies-to-fail-open-or-fail-closed):\nA new fail open option for merge request approval policies to offer\nflexibility to teams who want to ease the transition to policy enforcement\nas they roll out controls in their organization.\n\n- [Optional configuration for policy bot\ncomment](https://about.gitlab.com/releases/2024/05/16/gitlab-17-0-released/#optional-configuration-for-policy-bot-comment):\nThe security policy bot posts a comment on merge requests when they violate\na policy to help users understand when policies are enforced on their\nproject, when evaluation is completed, and if there are any violations\nblocking an MR, with guidance to resolve them.\n\n- [Merge request approval policies fail open/closed (policy\neditor)](https://about.gitlab.com/releases/2024/06/20/gitlab-17-1-released/#merge-request-approval-policies-fail-openclosed-policy-editor):\nWithin the policy editor users can now toggle security policies to fail open\nor fail closed. This enhancement extends the YAML support to allow for\nsimpler configuration within the policy editor view.\n\n- [Project owners receive expiring access token\nnotifications](https://about.gitlab.com/releases/2024/06/20/gitlab-17-1-released/#project-owners-receive-expiring-access-token-notifications):\nBoth project owners and maintainers with direct membership now receive email\nnotifications when their project access tokens are close to expiring. 
This\nhelps keep more people informed about upcoming token expiration.\n\n\nThese are some of the newest security and compliance enhancements provided\nin GitLab 17 and 17.1 that can be applied to strengthen your organization's\nsecurity posture! To learn more about GitLab and the other ways we can\nstrengthen your organization's security throughout all parts of the software\ndevelopment lifecycle, check out the following links:\n\n\n- [GitLab Security and\nCompliance](https://about.gitlab.com/solutions/security-compliance/)\n\n- [GitLab Application Security\ndocumentation](https://docs.gitlab.com/ee/user/application_security/)\n\n- [GitLab security and governance overview\nvideo](https://youtu.be/Y4RC-SW8Ric)\n\n- [GitLab Complete DevSecOps\ndemo](https://gitlab.com/gitlab-da/tutorials/security-and-governance/devsecops/simply-vulnerable-notes)\n\n- [GitLab Complete DevSecOps\ntutorial](https://gitlab-da.gitlab.io/tutorials/security-and-governance/devsecops/simply-vulnerable-notes/) \n\n- [Ultimate guide to the principle of least\nprivilege](https://about.gitlab.com/blog/the-ultimate-guide-to-least-privilege-access-with-gitlab/)\n",[693,717,719,9,695],{"slug":1762,"featured":91,"template":698},"get-to-know-the-security-and-governance-updates-in-gitlab-17-17-1","content:en-us:blog:get-to-know-the-security-and-governance-updates-in-gitlab-17-17-1.yml","Get To Know The Security And Governance Updates In Gitlab 17 17 
1","en-us/blog/get-to-know-the-security-and-governance-updates-in-gitlab-17-17-1.yml","en-us/blog/get-to-know-the-security-and-governance-updates-in-gitlab-17-17-1",{"_path":1768,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1769,"content":1775,"config":1779,"_id":1781,"_type":14,"title":1782,"_source":16,"_file":1783,"_stem":1784,"_extension":19},"/en-us/blog/getting-started-with-gitlab-how-to-import-your-projects-to-gitlab",{"title":1770,"description":1771,"ogTitle":1770,"ogDescription":1771,"noIndex":6,"ogImage":1772,"ogUrl":1773,"ogSiteName":685,"ogType":686,"canonicalUrls":1773,"schema":1774},"Getting started with GitLab: How to import your projects to GitLab","Learn how to import your projects from various sources, including Bitbucket, Gitea, GitHub, and GitLab Self-Managed.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097248/Blog/Hero%20Images/Blog/Hero%20Images/blog-getting-started-with-gitlab-banner-0497-option4-fy25_cFwd8DYFLekdnOLmbbChp_1750097247785.png","https://about.gitlab.com/blog/getting-started-with-gitlab-how-to-import-your-projects-to-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Getting started with GitLab: How to import your projects to GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Abubakar Siddiq Ango\"}],\n        \"datePublished\": \"2025-01-28\",\n      }",{"title":1770,"description":1771,"authors":1776,"heroImage":1772,"date":1358,"body":1777,"category":717,"tags":1778},[1538],"*Welcome to our \"Getting started with GitLab\" series, where we help\nnewcomers get familiar with the GitLab DevSecOps platform.*\n\n\nKnowing how to import your projects to GitLab is an essential skill to make\nthe most of the GitLab DevSecOps platform. 
You’ve [set up your\naccount](https://university.gitlab.com/pages/getting-started), invited\nusers, and\n[organized](https://about.gitlab.com/blog/getting-started-with-gitlab-how-to-manage-users/)\nthem based on your use case or team structure. Now, you need to bring your\nexisting projects into GitLab and start collaborating. These projects can be\nlocal files on your computer or hosted on a different source code management\nplatform. Let's explore the options.\n\n\n## Importing local project files\n\n\nYou don't want to start from scratch every time you import a project. Follow\nthese steps to get into GitLab existing legacy projects or applications that\nexist without version control or use version control.\n\n\n### Git project\n\n\n1. If Git is [already\ninitiated](https://docs.gitlab.com/ee/topics/git/commands.html#git-init) in\nyour local project, create a new project in GitLab and obtain the SSH or\nHTTPS URL by clicking on the **Code** button in the top right corner of your\nproject page.\n\n\n![create a new project in GitLab with SSH/HTTPS\nURLs](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097254/Blog/Content%20Images/Blog/Content%20Images/image8_aHR0cHM6_1750097252717.png)\n\n\n2. Switch to your terminal and ensure you are in your project folder:\n\n\n```bash  \n\ncd /project_folder  \n\n```\n\n\n3. Backup your existing [Git\norigin](https://git-scm.com/book/ms/v2/Git-Basics-Working-with-Remotes):\n\n\n```bash\n\n\ngit remote rename origin old-origin\n\n\n```\n\n\n4. Add the [GitLab\nremote](https://git-scm.com/book/ms/v2/Git-Basics-Working-with-Remotes) URL\nfor the new origin, when using SSH:\n\n\n```bash  \n\ngit remote add origin\n[git@gitlab.com](mailto:git@gitlab.com):gitlab-da/playground/abubakar/new-test-repo.git  \n\n```\n\n\nAnd for HTTPS: \n\n\n```bash  \n\ngit remote add origin\nhttps://gitlab.com/gitlab-da/playground/abubakar/new-test-repo.git  \n\n```\n\n\n5. 
Then push all existing\n[branches](https://docs.gitlab.com/ee/user/project/repository/branches/) and\n[tags](https://docs.gitlab.com/ee/user/project/repository/tags/) to GitLab:\n\n\n```bash  \n\ngit push --set-upstream origin --all  \n\ngit push --set-upstream origin --tags  \n\n```\n\n\nAll your project files, branches, and tags will be pushed to GitLab and\nyou can start collaborating.\n\n\n### Non-Git project\n\n\nAlternatively, if you have not initiated Git in your project, you will need\nto initialize Git, commit existing files, and push to GitLab as follows:\n\n\n```bash  \n\ngit init --initial-branch=main  \n\ngit remote add origin\ngit@gitlab.com:gitlab-da/playground/abubakar/new-test-repo.git  \n\ngit add .  \n\ngit commit -m \"Initial commit\"  \n\ngit push --set-upstream origin main  \n\n```\n\n\n## Importing from online sources\n\n\nIf you have your project on GitLab.com or other platforms and you want to\nmove it to another GitLab instance (like a self-managed instance) or from\nanother platform to GitLab.com, GitLab provides the import project feature\nwhen you want to create a new project.\n\n\n![Create a new project\nscreen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097253/Blog/Content%20Images/Blog/Content%20Images/image7_aHR0cHM6_1750097252718.png)\n\n\nImporting a project migrates the project files and some other components of\nthe project depending on the source. You can import from different sources\nlike Bitbucket, GitHub, Gitea, and a GitLab instance, among other sources.\nImport sources are enabled by default on GitLab.com, but they need to be\n[enabled for\nself-managed](https://docs.gitlab.com/ee/administration/settings/import_and_export_settings.html#configure-allowed-import-sources)\nby an administrator. 
We will look at a few of these sources in the following\nsections.\n\n\n![Import project from third-party\nsources](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097253/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750097252719.png)\n\n\n## GitLab sources\n\n\nYou can export projects from GitLab.com and GitLab Self-Managed instances\nusing the Export project feature in a project’s settings. \n\n\n![Export project\nscreen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097253/Blog/Content%20Images/Blog/Content%20Images/image9_aHR0cHM6_1750097252720.png)\n\n\nTo access it:\n\n\n- Go to your project’s settings and click into the **General** area.\n\n- Scroll to and **Expand Advanced** section.\n\n- Select **Export project**.\n\n- A notification will be shown stating: “Project export started. A download\nlink will be sent by email and made available on this page.”\n\n- After the export is generated, you can follow the link contained in the\nemail or refresh the project settings page to reveal the “Download export”\noption.\n\n\n### Importing the project\n\n\n![Import an exported GitLab\nproject](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097253/Blog/Content%20Images/Blog/Content%20Images/image10_aHR0cHM6_1750097252722.png)\n\n\n- Click on the **New project** button in your target GitLab instance.  \n\n- Select **Import project** and click on **GitLab Export** in the list of\nimport sources.  \n\n- Specify a project name and select the export file, then click **Import\nproject**.  \n\n- An \"import in progress\" page will be shown and once complete, you will be\nredirected to the imported project.\n\n\nDepending on the size of your project, the import time may vary. It's\nimportant to note that not everything in a project might be exported and a\nfew things might change after import. 
Review the\n[documentation](https://docs.gitlab.com/ee/user/project/settings/import_export.html#export-a-project-and-its-data)\nto understand the limitations. If you want to migrate a whole group instead\nof individual projects, the [Direct Transfer\nmethod](https://docs.gitlab.com/ee/user/group/import/index.html) is\nrecommended; this creates a copy of an entire group.\n\n\n## Third-party providers\n\n\nGitLab supports importing from Bitbucket Cloud, Bitbucket Server, FogBugz,\nGitea, and GitHub. The import process is similar across all the supported\nthird parties — the main difference is in the method of authentication.\nLet's look at a few of them.\n\n\n### GitHub\n\n\n![Authenticate with GitHub\nscreen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097253/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097252723.png)\n\n\nThere are three methods to import GitHub projects into GitLab:\n\n\n- [Using GitHub\nOAuth](https://docs.gitlab.com/ee/user/project/import/github.html#use-github-oauth)\n\n- [Using a GitHub personal access\ntoken](https://docs.gitlab.com/ee/user/project/import/github.html#use-a-github-personal-access-token)\n\n- [Using the\nAPI](https://docs.gitlab.com/ee/user/project/import/github.html#use-the-api)\n\n\nImporting using GitHub OAuth and personal access token are similar. The\ndifference lies in how you authorize GitLab to access your repositories.\nThe OAuth method is easier because you only need to click on the “Authorize\nwith GitHub” button and you are redirected to your GitHub account to\nauthorize the connection. 
Then the list of your projects is loaded for you\nto pick those you want to import.\n\n\n![Import repositories from GitHub\nscreen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097253/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097252725.png)\n\n\nAlternatively, you will need to generate a GitHub personal access token,\nselecting the `repo` and `read:org` scopes, and then provide it on the\n\"Import\" page.  For API imports, you can use the same personal access token\nwith our [Import REST API\nendpoints](https://docs.gitlab.com/ee/api/import.html#import-repository-from-github)\nin your script or application.\n\n\nIn this demo, GitLab Senior Developer Advocate Fernando Diaz explains how to\nimport a project from GitHub using the OAuth method:\n\n\n\u003C!-- blank line -->  \n\n\u003Cfigure class=\"video_container\"> \n  \u003Ciframe src=\"https://www.youtube.com/embed/0Id5oMl1Kqs?si=esF6wbz2j2JlhDVL\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>  \n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\nYou can learn about prerequisites, known issues, importing from GitHub\nEnterprise, and other valuable information from the GitLab [import\ndocumentation](https://docs.gitlab.com/ee/user/project/import/github.html).\n\n\n### Bitbucket\n\n\nImporting projects from Bitbucket is similar to importing them from GitHub.\nWhile using OAuth is applicable to [Bitbucket\nCloud](https://docs.gitlab.com/ee/user/project/import/bitbucket.html), the\nSaaS version of Bitbucket, you'll need to provide a URL, username, and\npersonal access token for [Bitbucket\nServer](https://docs.gitlab.com/ee/user/project/import/bitbucket_server.html),\nthe enterprise self-hosted version. 
Clicking on the Bitbucket Cloud option\non the \"Import\" screen automatically takes you to Atlassian authentication\nfor Bitbucket.\n\n\n![Import project from\nBitBucket](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097253/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750097252726.png)\n\n\nYou can also import Bitbucket projects using the [GitLab Import\nAPI](https://docs.gitlab.com/ee/api/import.html).\n\n\n### Gitea\n\n\n![Import project from\nGitea](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097253/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750097252727.png)\n\n\nImporting projects from\n[Gitea](https://docs.gitlab.com/ee/user/project/import/gitea.html) requires\nthe creation of a [personal access\ntoken](https://docs.gitea.com/next/development/api-usage#authentication-via-the-api)\non the Gitea platform and providing it along with the Gitea server URL on\nthe GitLab import page. OAuth authentication is not supported. \n\n\n### Generic remote Git repository\n\n\n![Import project from remote Git\nrepository](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097253/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097252728.png)\n\n\nWhere your Git provider is not supported or import is not possible using the\nsupported methods, a repository can be imported using its accessible\n`https://` or `git://` URL.  If it's not publicly accessible, you will\nprovide the repository URL along with username and password (or access token\nwhere applicable due to multifactor authentication).\n\n\nThis method can also be used for maintaining a copy of a remote project and\nkeeping it in sync, i.e.,\n[mirroring](https://docs.gitlab.com/ee/user/project/repository/mirror/).\nMirroring allows you to maintain repositories across different platforms and\nkeep them synced. 
This can be used to separate private and public access to\na project while ensuring both ends have the same copy, which is useful when\nopen-sourcing internal projects. It can also be used when working with\ncontractors and both parties use different platforms, and access to the codebase\nis necessary on both ends. \n\n\n## Summary\n\n\nImporting and migrating between GitLab instances and from other sources is\nan important process that needs to be planned to ensure the expectations are\nclear on what gets imported and with which method. While most third-party\nmethods import project items, including files, issues, and merge requests,\nsome methods have known issues and limitations. The [GitLab import\nsection](https://docs.gitlab.com/ee/user/project/import/) of the\ndocumentation has detailed information on all the supported methods that can\nhelp you plan your migration.   \n\n\n> #### Want to take your learning to the next level? [Sign up for GitLab\nUniversity courses](https://university.gitlab.com/). 
Or you can get going\nright away with [a free trial of GitLab\nUltimate](https://about.gitlab.com/free-trial/devsecops/).\n\n\n## \"Getting started with GitLab\" series\n\n\n- [How to manage\nusers](https://about.gitlab.com/blog/getting-started-with-gitlab-how-to-manage-users/)\n\n- [How to import your projects to\nGitLab](https://about.gitlab.com/blog/getting-started-with-gitlab-how-to-import-your-projects-to-gitlab/)  \n\n- [Mastering project\nmanagement](https://about.gitlab.com/blog/getting-started-with-gitlab-mastering-project-management/)\n\n- [Automating Agile workflows with the gitlab-triage\ngem](https://about.gitlab.com/blog/automating-agile-workflows-with-the-gitlab-triage-gem/)\n\n- [Working with CI/CD\nvariables](https://about.gitlab.com/blog/getting-started-with-gitlab-working-with-ci-cd-variables/)\n",[717,719,9],{"slug":1780,"featured":6,"template":698},"getting-started-with-gitlab-how-to-import-your-projects-to-gitlab","content:en-us:blog:getting-started-with-gitlab-how-to-import-your-projects-to-gitlab.yml","Getting Started With Gitlab How To Import Your Projects To Gitlab","en-us/blog/getting-started-with-gitlab-how-to-import-your-projects-to-gitlab.yml","en-us/blog/getting-started-with-gitlab-how-to-import-your-projects-to-gitlab",{"_path":1786,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1787,"content":1793,"config":1798,"_id":1800,"_type":14,"title":1801,"_source":16,"_file":1802,"_stem":1803,"_extension":19},"/en-us/blog/getting-started-with-gitlab-how-to-manage-users",{"title":1788,"description":1789,"ogTitle":1788,"ogDescription":1789,"noIndex":6,"ogImage":1790,"ogUrl":1791,"ogSiteName":685,"ogType":686,"canonicalUrls":1791,"schema":1792},"Getting started with GitLab: How to manage users","Learn how to manage users using groups, roles, and permissions. 
Walk through the setup of secure collaboration with proper project access.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097273/Blog/Hero%20Images/Blog/Hero%20Images/blog-getting-started-with-gitlab-banner-0497-option4-fy25_cFwd8DYFLekdnOLmbbChp_1750097273817.png","https://about.gitlab.com/blog/getting-started-with-gitlab-how-to-manage-users","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Getting started with GitLab: How to manage users\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Abubakar Siddiq Ango\"}],\n        \"datePublished\": \"2025-01-14\",\n      }",{"title":1788,"description":1789,"authors":1794,"heroImage":1790,"date":1795,"body":1796,"category":717,"tags":1797},[1538],"2025-01-14","*Welcome to our \"Getting started with GitLab\" series, where we help newcomers get familiar with the GitLab DevSecOps platform.*\n\nEnsuring a safe, compliant, and collaborative environment starts with the most basic of tasks - managing users. In this tutorial, we show you how to establish project members, assign roles and permissions, and create groups and subgroups.\n\nNote: To follow along with this tutorial, you should have a GitLab account either through GitLab.com or your organization's self-managed instance. If you need help, visit our fundamentals area on [GitLab University](https://university.gitlab.com/).\n\nLet's get started.\n\nWhen you create GitLab users, they only have access to [their private projects, public projects, and projects set with internal visibility](https://docs.gitlab.com/ee/user/public_access.html). For the purposes of this tutorial, your project is super secret and only invited members should have access to it – at varying permissions settings. 
To ensure this, you can invite users as [members of the project](https://docs.gitlab.com/ee/user/project/members/).\n\n## Project members\n\n![Project members screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097278/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097278487.png)\n\nGitLab users can be invited to a project and [assigned a role](https://docs.gitlab.com/ee/user/permissions.html), which determines what they can do in the project. The owner of a project can delegate administrative tasks to other users as maintainers, who can do almost everything an owner does, aside from changes to a project such as deleting, archiving, or transferring a project.\n\n![Invite members screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097278/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097278487.png)\n\n[Maintainers](https://docs.gitlab.com/ee/user/permissions.html#roles) of the project can invite other members as developers who have access to all the features to create, build, and deploy software. Users who are not developers but need project management access can be invited to the project as [planners](https://about.gitlab.com/blog/introducing-gitlabs-new-planner-role-for-agile-planning-teams/), reporters, and guests with varying levels of permissions. These roles can also be used to determine who can make changes to certain branches with [protected branches](https://docs.gitlab.com/ee/user/project/repository/branches/protected.html).\n\nIf you are working with contractors or your use requires user permissions to expire, you can set an expiry date after which the user loses access to the project. Project members can also be identified as direct or indirect members, based on their [membership type](https://docs.gitlab.com/ee/user/project/members/#membership-types). 
Direct members are invited directly into the project, whereas indirect members are often inherited from a [GitLab group](https://docs.gitlab.com/ee/user/group/) a project belongs to.\n\nNow, let's look at Group memberships.\n\n## Group memberships\n\nGroups in GitLab can be a top level created at the root of a GitLab instance like the [gitlab.com/gitlab-org](http://gitlab.com/gitlab-org). which is a parent group used to organize other subgroups like [gitlab.com/gitlab-org/charts](http://gitlab.com/gitlab-org/charts). Groups are useful even if you only have one project.\n\nGroups can be used for different reasons:\n\n- organizing similar or related projects  \n- organizing users into groups for better team coordination\n\nWhen using groups to organize users, you can organize teams in groups and [invite a group to a project](https://docs.gitlab.com/ee/user/project/members/sharing_projects_groups.html) with a specific role for an entire team. You can have a `dev` group for the developers of the team, `pm` group for the project managers and `leads` for team leads. When inviting the groups, `dev` can be assigned the Developer role, `pm` the Planner role, and `leads` the Maintainer role. \n\n![Invite a group screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097279/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097278488.png)\n\nMembers of each group can be can be added or removed without needing to update project permissions. This is particularly useful when your team has grown to have several projects. 
However, it is important to [observe best practices](https://docs.gitlab.com/ee/user/project/members/sharing_projects_groups.html#setting-up-a-group-for-collaboration) for using groups for collaboration.\n\nAnother helpful aspect of having users organized in groups is that you can [mention](https://docs.gitlab.com/ee/user/discussions/#mentions) the entire group in issues, merge requests, or comments, which makes keeping an entire team informed easier.\n\n### Create subgroups\n\n[Subgroups](https://docs.gitlab.com/ee/user/group/subgroups/) can be used to further organize users in a group and you can keep adding subgroups up to 20 nested levels. Users in a subgroup inherit the the permissions they have in a parent group. If you want to grant a user in a subgroup a role higher than what they inherited, you will need to [invite them to the subgroup](https://docs.gitlab.com/ee/user/group/subgroups/#override-ancestor-group-membership) with the new higher role. Note: You can not give them a lower role in the subgroup.\n\n### Manage groups\n\nGroup Owners have several management options to determine how users function in a group. For instance, you can set how a user can request access to a group, enable/disable [group mentions](https://docs.gitlab.com/ee/user/group/manage.html#disable-group-mentions), [restrict access](https://docs.gitlab.com/ee/user/group/manage.html#turn-on-restricted-access), or [moderate users](https://docs.gitlab.com/ee/user/group/moderate_users.html), among other options. An exciting new feature, which is still under development at the time of this article's publication, is the [automatic removal of dormant users](https://docs.gitlab.com/ee/user/group/moderate_users.html#automatically-remove-dormant-members) after a minimum of 90 days and a maximum of five years. This will help keep groups clean and better manage the release of license seats.\n\n## Learn more\n\nManaging users on GitLab depends on your use case. 
If your organization is larger with more advanced workflows and user management, GitLab provides more advanced ways to [manage enterprise users](https://docs.gitlab.com/ee/user/enterprise_user/index.html). You can also explore more options on how to [manage your organization](https://docs.gitlab.com/ee/topics/set_up_organization.html) and with [GitLab Ultimate](https://about.gitlab.com/pricing/ultimate/), you get more granularity and compliance features.\n\n> #### Want to take your learning to the next level? [Sign up for GitLab University courses](https://university.gitlab.com/). Or you can get going right away with [a free trial of GitLab Ultimate](https://about.gitlab.com/free-trial/).\n\n## \"Getting started with GitLab\" series\nRead more articles in our \"Getting started with GitLab\" series:\n\n- [How to import your projects to GitLab](https://about.gitlab.com/blog/getting-started-with-gitlab-how-to-import-your-projects-to-gitlab/)  \n- [Mastering project management](https://about.gitlab.com/blog/getting-started-with-gitlab-mastering-project-management/)\n- [Automating Agile workflows with the gitlab-triage gem](https://about.gitlab.com/blog/automating-agile-workflows-with-the-gitlab-triage-gem/)\n- [Understanding CI/CD](https://about.gitlab.com/blog/getting-started-with-gitlab-understanding-ci-cd/)\n- [Working with CI/CD variables](https://about.gitlab.com/blog/getting-started-with-gitlab-working-with-ci-cd-variables/)\n",[9,695,719,908,717],{"slug":1799,"featured":91,"template":698},"getting-started-with-gitlab-how-to-manage-users","content:en-us:blog:getting-started-with-gitlab-how-to-manage-users.yml","Getting Started With Gitlab How To Manage 
Users","en-us/blog/getting-started-with-gitlab-how-to-manage-users.yml","en-us/blog/getting-started-with-gitlab-how-to-manage-users",{"_path":1805,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1806,"content":1812,"config":1817,"_id":1819,"_type":14,"title":1820,"_source":16,"_file":1821,"_stem":1822,"_extension":19},"/en-us/blog/getting-started-with-gitlab-mastering-project-management",{"title":1807,"description":1808,"ogTitle":1807,"ogDescription":1808,"noIndex":6,"ogImage":1809,"ogUrl":1810,"ogSiteName":685,"ogType":686,"canonicalUrls":1810,"schema":1811},"Getting started with GitLab: Mastering project management","Discover the key components of project management and how to put them to use for better organization and tracking.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097294/Blog/Hero%20Images/Blog/Hero%20Images/blog-getting-started-with-gitlab-banner-0497-option4-fy25_cFwd8DYFLekdnOLmbbChp_1750097293924.png","https://about.gitlab.com/blog/getting-started-with-gitlab-mastering-project-management","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Getting started with GitLab: Mastering project management\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"GitLab\"}],\n        \"datePublished\": \"2025-02-11\",\n      }",{"title":1807,"description":1808,"authors":1813,"heroImage":1809,"date":1814,"body":1815,"category":717,"tags":1816},[822],"2025-02-11","*Welcome to our \"Getting started with GitLab\" series, where we help newcomers get familiar with the GitLab DevSecOps platform.*\n\nGitLab is much more than just a place to store your code. It is an AI-powered DevSecOps platform with tools to help you plan, organize, track, and successfully deliver your projects. 
This post will guide you through GitLab's key project management features and show you how to leverage them effectively.\n\n## Why GitLab for project management?\n\nImagine having your code repository, issue tracker, and communication platform all seamlessly integrated in one place. That's the power of GitLab. By centralizing everything, you can streamline your workflow, enhance collaboration, and keep your projects moving forward. No more jumping between different tools and losing track of information. GitLab brings it all together, making it easier to manage your projects from start to finish.\n\n## Key components of GitLab project management\n\nLet's break down the essential elements:\n\n* [Epics](https://docs.gitlab.com/ee/user/group/epics/): Think of epics as the big picture. They represent major features, overarching goals, or long-term initiatives within your project. Need to revamp your website? That's an epic! Epics help you organize your work into larger, manageable chunks.  \n* [Issues](https://docs.gitlab.com/ee/user/project/issues/): Issues are the individual tasks or work items that contribute to your project goals. Each issue represents a specific action, like \"design the homepage\" or \"write the 'about us' page.\" Issues are the building blocks of your project, and they provide a clear way to track individual tasks.  \n* [Labels](https://docs.gitlab.com/ee/user/project/labels.html): Labels are like tags that help you categorize and filter your work. You can use labels to indicate priority (e.g., high, medium, low), status (e.g., to do, in progress, done), or assign issues to specific teams or individuals. Labels provide a flexible way to organize and prioritize your work.  \n* Boards: GitLab's issue boards are your visual workspace. They provide a Kanban-style view of your project, allowing you to see the status of all your issues at a glance. 
Drag and drop issues across different lists (e.g., \"To Do,\" \"Doing,\" \"Done\") to visualize your workflow and track progress. In GitLab, you can create boards for [issues](https://docs.gitlab.com/ee/user/project/issue_board.html) and [Epics](https://docs.gitlab.com/ee/user/group/epics/epic_boards.html).  \n* [Milestones](https://docs.gitlab.com/ee/user/project/milestones/): Milestones mark significant checkpoints or target dates within your project. They help you track progress towards specific goals and deadlines. For example, you might have milestones for completing a major feature, releasing a beta version, or launching the final product.  \n* [Tasks](https://docs.gitlab.com/ee/user/tasks.html): For those extra granular steps, break down your issues into smaller tasks. This helps with delegation, clarifies individual responsibilities, and ensures nothing gets overlooked. Tasks provide a way to create checklists within issues, making it easier to track progress on complex tasks.\n\n## Deep dive into the features\n\n### 1. Epics: The big picture\n\n* Creating epics: Navigate to your group's \"Epics\" menu under “Plan”. Click **New epic** and give it a descriptive title and a clear description outlining the goal. You can also specify the start and end date of the epic – this is useful when using [Roadmaps](https://docs.gitlab.com/ee/user/group/roadmap/).\n\n![Epic creation page](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097301/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097300817.png)\n\n* [Roadmaps](https://docs.gitlab.com/ee/user/group/roadmap/): Add your epics to a roadmap to visualize your project timeline and long-term goals. 
Roadmaps provide a bird's-eye view of your project plan, making it easy to see the big picture and track progress towards major milestones.\n\n![Roadmap view](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097301/Blog/Content%20Images/Blog/Content%20Images/image8_aHR0cHM6_1750097300818.png)\n\n### 2. Issues: Getting things done\n\n* Creating issues: In your project, go to the \"Issues\" menu under “Plan” and click **New issue**. Provide a concise and descriptive title like \"Design Homepage Wireframes,\" assign it to a team member, set a due date, and add a detailed description outlining the task's requirements.  \n* GitLab Duo: You can leverage the power of [GitLab Duo to create detailed issue descriptions](https://docs.gitlab.com/ee/user/project/issues/managing_issues.html#populate-an-issue-with-issue-description-generation) with just a little hint of what you want to achieve.  \n* Weighting: Estimate the effort required for each issue by assigning weights. This helps with planning and prioritization. For example, a simple task might have a weight of **1**, while a more complex task might have a weight of **5**.\n\n![Issue with weight of 4 assigned](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097301/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750097300819.png)\n\n### 3. Labels: Organizing your work\n\n* Creating labels: Go to your project's \"Issues\" tab and click Labels. Create custom labels with clear names to categorize your issues. For example, create labels like **Priority: High**, **Status: In Progress**, or **Team: Design**. Apply these labels to your issues to keep them organized and easily filterable.\n\n![Labels screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097301/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750097300820.png)\n\n### 4. Boards: Visualizing your workflow\n\n* Kanban boards: GitLab's boards provide a Kanban-style view of your project. 
Create lists like \"To Do,\" \"Doing,\" and \"Done\" to represent the stages of your workflow. Drag and drop issues across these lists to visualize their progress.\n* Customizing boards: Tailor your boards to match your specific workflow. Add more columns, filter issues by labels or assignees, and set up swim lanes to categorize issues by epics or other criteria.\n\n![Visualize workflow with issue boards](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097301/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097300820.png)\n\n### 5. Tasks: Breaking down the work\n\n* Creating tasks: Within an issue, use the checklist markdown syntax to create a task list. Each item in the list represents a smaller step within the larger issue. For example, in the issue \"Design Homepage Wireframes,\" you might have tasks like \"Sketch initial concepts,\" \"Create digital wireframes,\" and \"Get feedback from stakeholders.\" To create a Task, click on the **Add** button in the \"Child Items\" section of an issue’s page. Then, enter the title of the task, and click **Create Task**.\n\n![Issue with create task button](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097301/Blog/Content%20Images/Blog/Content%20Images/image7_aHR0cHM6_1750097300822.png)\n\n### 6. Milestones: Tracking progress\n* Setting milestones: Define milestones to mark significant points in your project, like completing a specific feature or reaching a key deadline. Give your milestones clear titles and due dates.\n* Associating with issues: Link issues and epics to milestones to track progress towards those goals. This helps you see how individual tasks contribute to the overall project plan.\nCreating a milestone: Under the \"Plan\" dropdown menu, click on **Milestones > New milestone**. 
Specify the milestone title, description, and start and due dates.\n\n![New milestone screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097301/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750097300823.png)\n\n\u003Cbr>\u003C/br>\n\n![New page wtih milestone on it](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097301/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097300823.png)\n\n### 7. [Iterations](https://docs.gitlab.com/ee/user/group/iterations/): Working in sprints\n\n* Defining iterations: If you're using an Agile workflow, define iterations (sprints) with specific start and end dates. This helps you break down your work into smaller, more manageable time boxes.  \n* Assigning issues: Assign issues to iterations to plan your work in shorter cycles and focus on delivering incremental value.\n\n### 8. [Time tracking](https://docs.gitlab.com/ee/user/project/time_tracking.html): Measuring effort\n\n* Logging time: Within an issue, use the \"/spend\" quick action followed by the time spent (e.g., \"/spend 2h 30m\") to log your work. This helps you track the actual time spent on each task.  \n* Analyzing data: Generate time tracking reports to gain insights into project progress, team efficiency, and identify potential bottlenecks.\n\n![Time tracking report](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097301/Blog/Content%20Images/Blog/Content%20Images/image9_aHR0cHM6_1750097300824.png)\n\n### 9. Dependencies: Managing workflow\n\n* [Linking issues](https://docs.gitlab.com/ee/user/project/issues/related_issues.html): Create dependencies between issues to ensure tasks are completed in the correct order. For example, if issue A must be completed before issue B can begin, you can create a dependency between them. This helps you visualize the workflow and avoid potential roadblocks.\n\n### 10. 
Templates: Streamlining issue creation\n\n* [Creating templates](https://docs.gitlab.com/ee/user/project/description_templates.html): Create issue templates to standardize the information captured for common tasks, saving you time and ensuring consistency. For example, you could create a template for bug reports that includes fields for steps to reproduce expected behavior and actual behavior.\n\n### Collaboration is key\n\nGitLab fosters collaboration through the following:\n\n* [Comments](https://docs.gitlab.com/ee/user/discussions/): Discuss issues and epics directly within GitLab. Use comments to provide updates, ask questions, and share feedback.  \n* [Mentions](https://docs.gitlab.com/ee/user/discussions/#mentions): Use **@** to mention specific team members and notify them of updates or request their input.  \n* Discussions: Engage in threaded discussions within issues and epics to brainstorm ideas, solve problems together, and keep everyone informed.\n\n### Ready to get started?\n\nNow that you've explored the power of GitLab's project management features, it's time to put them into practice! Create a sample project, experiment with different features, and discover how GitLab can transform your workflow. You can also learn more about how GitLab can help you facilitate [Kanban](https://docs.gitlab.com/ee/tutorials/kanban/) and [Scrum](https://docs.gitlab.com/ee/tutorials/scrum_events/) in the GitLab documentation.\n\n> #### Want to take your learning to the next level? [Sign up for GitLab University courses](https://university.gitlab.com/). 
Or you can get going right away with a [free trial of GitLab Ultimate](https://about.gitlab.com/free-trial/).\n\n## \"Getting started with GitLab\" series\nRead more articles in our \"Getting started with GitLab\" series:\n\n- [How to manage users](https://about.gitlab.com/blog/getting-started-with-gitlab-how-to-manage-users/)\n- [How to import your projects to GitLab](https://about.gitlab.com/blog/getting-started-with-gitlab-how-to-import-your-projects-to-gitlab/)  \n- [Automating Agile workflows with the gitlab-triage gem](https://about.gitlab.com/blog/automating-agile-workflows-with-the-gitlab-triage-gem/)\n- [Understanding CI/CD](https://about.gitlab.com/blog/getting-started-with-gitlab-understanding-ci-cd/)\n- [Working with CI/CD variables](https://about.gitlab.com/blog/getting-started-with-gitlab-working-with-ci-cd-variables/)",[719,717,9,908],{"slug":1818,"featured":6,"template":698},"getting-started-with-gitlab-mastering-project-management","content:en-us:blog:getting-started-with-gitlab-mastering-project-management.yml","Getting Started With Gitlab Mastering Project Management","en-us/blog/getting-started-with-gitlab-mastering-project-management.yml","en-us/blog/getting-started-with-gitlab-mastering-project-management",{"_path":1824,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1825,"content":1830,"config":1835,"_id":1837,"_type":14,"title":1838,"_source":16,"_file":1839,"_stem":1840,"_extension":19},"/en-us/blog/getting-started-with-gitlab-understanding-ci-cd",{"title":1826,"description":1827,"ogTitle":1826,"ogDescription":1827,"noIndex":6,"ogImage":900,"ogUrl":1828,"ogSiteName":685,"ogType":686,"canonicalUrls":1828,"schema":1829},"Getting started with GitLab: Understanding CI/CD","Learn the basics of continuous integration/continuous delivery in this beginner's guide, including what CI/CD components are and how to create them.","https://about.gitlab.com/blog/getting-started-with-gitlab-understanding-ci-cd","\n                        {\n        
\"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Getting started with GitLab: Understanding CI/CD\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"GitLab\"}],\n        \"datePublished\": \"2025-04-25\",\n      }",{"title":1826,"description":1827,"authors":1831,"heroImage":900,"date":1832,"body":1833,"category":717,"tags":1834},[822],"2025-04-25","*Welcome to our \"Getting started with GitLab\" series, where we help\nnewcomers get familiar with the GitLab DevSecOps platform.*\n\n\nImagine a workflow where every code change is automatically built, tested,\nand deployed to your users. That's the power of [Continuous\nIntegration/Continuous Delivery\n(CI/CD)](https://about.gitlab.com/topics/ci-cd/)! CI/CD helps you catch bugs\nearly, ensures code quality, and delivers software faster and more\nfrequently.\n\n\n### What is CI/CD?\n\n\n* **Continuous Integration** is a development practice where developers\nintegrate code changes into a shared repository frequently, preferably\nseveral times a day. Each integration is then verified by an automated build\nand test process, allowing teams to detect problems early.  \n\n* **Continuous Delivery** extends CI by automating the release pipeline,\nensuring that your code is *always* in a deployable state. You can deploy\nyour application to various environments (e.g., staging, production) with a\nsingle click or automatically.  \n\n* **Continuous Deployment** takes it a step further by automatically\ndeploying *every successful build* to production. This requires a high\ndegree of confidence in your automated tests and deployment process.\n\n\n### Why GitLab CI/CD?\n\n\nGitLab CI/CD is a powerful, integrated system that comes built-in with\nGitLab. It offers a seamless experience for automating your entire software\ndevelopment lifecycle. With GitLab CI/CD, you can:\n\n\n* **Automate everything:** Build, test, and deploy your applications with\nease.  
\n\n* **Catch bugs early:** Detect and fix errors before they reach\nproduction.  \n\n* **Get faster feedback:** Receive immediate feedback on your code\nchanges.  \n\n* **Improve collaboration:** Work together more effectively with automated\nworkflows.  \n\n* **Accelerate delivery:** Release software faster and more frequently.  \n\n* **Reduce risk:** Minimize deployment errors and rollbacks.\n\n\n### The elements of GitLab CI/CD\n\n\n* `.gitlab-ci.yml`**:** This [YAML\nfile](https://docs.gitlab.com/ee/ci/yaml/), located in your project's root\ndirectory, defines your CI/CD pipeline, including stages, jobs, and\nrunners.  \n\n* [**GitLab Runner**](https://docs.gitlab.com/runner/)**:** This agent\nexecutes your CI/CD jobs on your infrastructure (e.g. physical machines,\nvirtual machines, Docker containers, or Kubernetes clusters).  \n\n* [**Stages**](https://docs.gitlab.com/ee/ci/yaml/#stages)**:** Stages\ndefine the order of execution for your jobs (e.g. build, test, and\ndeploy).  \n\n* [**Jobs**](https://docs.gitlab.com/ee/ci/yaml/#job-keywords)**:** Jobs are\nindividual units of work within a stage (e.g. compile code, run tests, and\ndeploy to staging).\n\n\n### Setting up GitLab CI\n\n\nGetting started with GitLab CI is simple. 
Here's a basic example of a\n`.gitlab-ci.yml` file:\n\n\n```yaml\n\n\nstages:\n  - build\n  - test\n  - deploy\n\nbuild_job:\n  stage: build\n  script:\n    - echo \"Building the application...\"\n\ntest_job:\n  stage: test\n  script:\n    - echo \"Running tests...\"\n\ndeploy_job:\n  stage: deploy\n  script:\n    - echo \"Deploying to production...\"\n  environment:\n    name: production\n\n```\n\n\nThis configuration defines three stages: \"build,\" \"test,\" and \"deploy.\" Each\nstage contains a job that executes a simple script.\n\n\n### CI/CD configuration examples\n\n\nLet's explore some more realistic examples.\n\n\n**Building and deploying a Node.js application**\n\n\nThe pipeline definition below outlines using npm to build and test a Node.js\napplication and [dpl](https://docs.gitlab.com/ci/examples/deployment/) to\ndeploy the application to Heroku. The deploy stage of the pipeline makes use\nof [GitLab CI/CD variables](https://docs.gitlab.com/ci/variables/), which\nallow developers to store sensitive information (e.g. credentials) and\nsecurely use them in CI/CD processes. In this example, an API key to deploy\nto Heroku is stored under the variable key name `$HEROKU_API_KEY` used by\nthe dpl tool.\n\n\n```yaml\n\n\nstages:\n  - build\n  - test\n  - deploy\n\nbuild:\n  stage: build\n  image: node:latest\n  script:\n    - npm install\n    - npm run build\n\ntest:\n  stage: test\n  image: node:latest\n  script:\n    - npm run test\n\ndeploy:\n  stage: deploy\n  image: ruby:latest\n  script:\n    - gem install dpl\n    - dpl --provider=heroku --app=$HEROKU_APP_NAME --api-key=$HEROKU_API_KEY\n\n```\n\n\n**Deploying to different environments (staging and production)**\n\n\nGitLab also offers the idea of\n[Environments](https://docs.gitlab.com/ci/environments/) with CI/CD. This\nfeature allows users to track deployments from CI/CD to infrastructure\ntargets. 
In the example below, the pipeline adds stages with an environment\nproperty for a staging and production environment. While the deploy_staging\nstage will always run its script, the deploy_production stage requires\nmanual approval to prevent accidental deployment to production.  \n\n\n```yaml\n\n\nstages:\n  - build\n  - test\n  - deploy_staging\n  - deploy_production\n\nbuild:\n  # ...\n\ntest:\n  # ...\n\ndeploy_staging:\n  stage: deploy_staging\n  script:\n    - echo \"Deploying to staging...\"\n  environment:\n    name: staging\n\ndeploy_production:\n  stage: deploy_production\n  script:\n    - echo \"Deploying to production...\"\n  environment:\n    name: production\n  when: manual  # Requires manual approval\n\n```\n\n\n### GitLab Auto DevOps\n\n\n[GitLab Auto DevOps](https://docs.gitlab.com/ee/topics/autodevops/)\nsimplifies CI/CD by providing a pre-defined configuration that automatically\nbuilds, tests, and deploys your applications. It leverages best practices\nand industry standards to streamline your workflow.\n\n\nTo enable Auto DevOps:\n\n\n1. Go to your project's **Settings > CI/CD > General pipelines**.  \n\n2. Enable the **Auto DevOps** option.\n\n\nAuto DevOps automatically detects your project's language and framework and\nconfigures the necessary build, test, and deployment stages. You don’t even\nneed to create a `.gitlab-ci.yml` file.\n\n\n### CI/CD Catalog\n\n\nThe [CI/CD\nCatalog](https://about.gitlab.com/blog/faq-gitlab-ci-cd-catalog/)\nis a list of projects with published [CI/CD\ncomponents](https://docs.gitlab.com/ee/ci/components/) you can use to extend\nyour CI/CD workflow. Anyone can create a component project and add it to the\nCI/CD Catalog or contribute to an existing project to improve the available\ncomponents. 
You can find published components in the [CI/CD\nCatalog](https://gitlab.com/explore/catalog) on GitLab.com.\n\n\n> [Tutorial: How to set up your first GitLab CI/CD\ncomponent](https://about.gitlab.com/blog/tutorial-how-to-set-up-your-first-gitlab-ci-cd-component/)\n\n\n### CI templates\n\n\nYou can also create your own [CI\ntemplates](https://docs.gitlab.com/ee/ci/examples/) to standardize and reuse\nCI/CD configurations across multiple projects. This promotes consistency and\nreduces duplication.\n\n\nTo create a CI template:\n\n\n1. Create a `.gitlab-ci.yml` file in a dedicated project or repository.  \n\n2. Define your CI/CD configuration in the template.  \n\n3. In your project's `.gitlab-ci.yml` file, use the `include` keyword to\ninclude the template.\n\n\n## Take your development to the next level\n\n\nGitLab CI/CD is a powerful tool that can transform your development\nworkflow. By understanding the concepts of CI/CD, configuring your\npipelines, and leveraging features like Auto DevOps, the CI/CD Catalog, and\nCI templates, you can automate your entire software development lifecycle\nand deliver high-quality software faster and more efficiently.\n\n\n> Want to take your learning to the next level? Sign up for [GitLab\nUniversity courses](https://university.gitlab.com/). 
Or you can get going\nright away with a [free trial of GitLab\nUltimate](https://about.gitlab.com/free-trial/).\n\n\n## \"Getting Started with GitLab\" series\n\n\nCheck out more articles in our \"Getting Started with GitLab\" series:\n\n\n- [How to manage\nusers](https://about.gitlab.com/blog/getting-started-with-gitlab-how-to-manage-users/)\n\n- [How to import your projects to\nGitLab](https://about.gitlab.com/blog/getting-started-with-gitlab-how-to-import-your-projects-to-gitlab/)  \n\n- [Mastering project\nmanagement](https://about.gitlab.com/blog/getting-started-with-gitlab-mastering-project-management/)\n\n- [Automating Agile workflows with the gitlab-triage\ngem](https://about.gitlab.com/blog/automating-agile-workflows-with-the-gitlab-triage-gem/)\n\n- [Working with CI/CD\nvariables](https://about.gitlab.com/blog/getting-started-with-gitlab-working-with-ci-cd-variables/)\n",[109,1542,1543,9,717,719],{"slug":1836,"featured":91,"template":698},"getting-started-with-gitlab-understanding-ci-cd","content:en-us:blog:getting-started-with-gitlab-understanding-ci-cd.yml","Getting Started With Gitlab Understanding Ci Cd","en-us/blog/getting-started-with-gitlab-understanding-ci-cd.yml","en-us/blog/getting-started-with-gitlab-understanding-ci-cd",{"_path":1842,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1843,"content":1848,"config":1853,"_id":1855,"_type":14,"title":1856,"_source":16,"_file":1857,"_stem":1858,"_extension":19},"/en-us/blog/github-to-gitlab-migration-made-easy",{"title":1844,"description":1845,"ogTitle":1844,"ogDescription":1845,"noIndex":6,"ogImage":1634,"ogUrl":1846,"ogSiteName":685,"ogType":686,"canonicalUrls":1846,"schema":1847},"GitHub to GitLab migration the easy way","Learn how easy it is to migrate from GitHub to GitLab using GitLab's project import functionality.","https://about.gitlab.com/blog/github-to-gitlab-migration-made-easy","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": 
\"Article\",\n        \"headline\": \"GitHub to GitLab migration the easy way\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Fernando Diaz\"}],\n        \"datePublished\": \"2023-07-11\",\n      }",{"title":1844,"description":1845,"authors":1849,"heroImage":1634,"date":1850,"body":1851,"category":1277,"tags":1852},[1599],"2023-07-11","If you are using different CI/CD tools and are considering migrating over to\nGitLab, you may be wondering about\n\nthe difficulty of the migration process. Migration is usually a concern for\n[DevSecOps](https://about.gitlab.com/topics/devsecops/) teams when\nconsidering a new solution. This is due to the fact that migrating may\ninvolve heavy lifting. However, migrating to the GitLab AI-powered DevSecOps\nPlatform can be extremely simple and I will show you how step by step. \n\n\nIn this blog post, we will go over how to migrate from GitHub to GitLab\nusing our [project import](https://docs.gitlab.com/ee/user/project/import/)\nfunctionality. Manually migrating GitHub Actions to GitLab pipelines will be\ncovered as well. I have also created a video going over the migration\nprocess for those who prefer that format:\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/0Id5oMl1Kqs\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n## What data can be migrated from GitHub to GitLab?\n\nGitLab's built-in importer allows for GitHub projects to be automatically\nmigrated into GitLab. The built-in importer\n\nis accessed directly from GitLab's project creation UI. 
From the UI, you can\nselect what data you wish to migrate to GitLab.\n\n\nThe data that can be migrated includes the following:\n\n* Repository description\n\n* Git repository data\n\n* Branch protection rules\n\n* Collaborators (members)\n\n* Issues\n\n* Pull requests\n\n* Wiki pages\n\n* Milestones\n\n* Labels\n\n* Release notes content\n\n* Release notes attachments\n\n* Comment attachments\n\n* Issue description attachments\n\n* Pull request description attachments\n\n* Pull request review comments\n\n* Regular issue and pull request comments\n\n* Git Large File Storage (LFS) objects\n\n* Pull request reviews\n\n* Pull request assigned reviewers\n\n* Pull request “merged by” information\n\n* Pull request comments replies in discussions\n\n* Pull request review comments suggestions\n\n* Issue events and pull requests events\n\n\nGitHub and GitLab have different naming conventions and concepts, so a\nmapping must be performed during the migration. For example, when\ncollaborators/members are migrated, roles from GitHub are mapped to the\nappropriate GitLab roles as follows:\n\n\n| GitHub role | GitLab role |\n\n| ----------- | ----------- |\n\n| Read        | Guest       |\n\n| Triage      | Reporter    |\n\n| Write       | Developer   |\n\n| Maintain    | Maintainer  |\n\n| Admin       | Owner       |\n\n\n## Prerequisites\n\nNow that you have an understanding of what can be imported, let's review the\nprerequisites for performing the migration.\n\n\nWith the GitLab importer, you can either import your projects from\n**GitHub.com** or **GitHub Enterprise** to either **GitLab.com** or\n**Self-managed GitLab** as long as you meet the following requirements:\n\n* You must be a Maintainer on the GitLab destination group you are importing\nto from GitHub\n\n* Each GitHub author and assignee in the repository must have a\npublic-facing email address on GitHub that matches their GitLab email\naddress\n\n* GitHub accounts must have a public-facing email address that is 
populated\n\n* [GitHub import\nsource](https://docs.gitlab.com/ee/administration/settings/visibility_and_access_controls.html#configure-allowed-import-sources)\nmust be enabled (Self-managed GitLab only)\n\n\nWhen migrating a user, GitLab uses the public-facing email address in GitHub\nto verify the user with the same email on GitLab. Because email ownership is\nunique, you'll know you have set a valid user with valid permissions.\n\n\n## Performing the import\n\nNow let's go over how to perform the migration. I will be migrating my\nproject, the [Reddit sentiment\nanalyzer](https://github.com/fishtoadsoft/reddit-sentiment-analyzer), from\nGitHub to GitLab. The Reddit sentiment analyzer contains a pull request\n(called a merge request in GitLab), issues, and comments. \n\n\n**Note:** While you may not have permissions to my project, the step-by-step\nprocess applies to any project you own. I am using my project so you can see\nhow I migrate GitHub Actions in the next section. Now, let's get started!\n\n\n1) Create a new project in GitLab using the [Project Creation\nInterface](https://gitlab.com/projects/new).\n\n\n2) Select the **Import Project** box. This allows you to migrate data from\nexternal sources.\n\n\n![Import project\nbox](https://about.gitlab.com/images/blogimages/2023-july-github-to-gitlab-migration/import_project.png)\n\n\n3) Under **Import project from**, press the **GitHub** button. This will\ntake you to the **Authenticate with GitHub** page.\n\n\n4) Press the **Authenticate with GitHub** button. You can also use a\n[personal access\ntoken](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens)\nfrom GitHub with the **repo scope** if you prefer. 
This will take you to the\nGitHub authorization app.\n\n\n5) From here, you can grant access to [GitHub\norganization(s)](https://docs.github.com/en/organizations/collaborating-with-groups-in-organizations/about-organizations)\nwhere the projects you wish to migrate are located.\n\n\n![GitHub authorization\napp](https://about.gitlab.com/images/blogimages/2023-july-github-to-gitlab-migration/github_authorize_app.png)\n\n\n6) Press the **Grant** button for the organization where the project you\nwish to migrate is stored.\n\n\n7) Press the **Authorize gitlabhq** button to grant GitLab access to the\norganization(s) selected. You will then be taken to the import selection\npage.\n\n\n8) From here, you can select the items you wish to import. \n\n\n![Import\nselection](https://about.gitlab.com/images/blogimages/2023-july-github-to-gitlab-migration/import_selection.png)\n\n\n**Note:** The more items you choose to migrate, the longer the import will\ntake.\n\n\n9) Then you must set the GitLab location you want to migrate the GitHub\nproject to.\n\n\n![Set the GitLab location to migrate\nto](https://about.gitlab.com/images/blogimages/2023-july-github-to-gitlab-migration/import_to.png)\n\n\n10) Press the **Import** button and the import will begin. You can see the\nprogress in the UI. Once the import is complete the status will be changed\nto \"complete.\"\n\n\n[Import progress\nstatus](/images/blogimages/2023-july-github-to-gitlab-migration/import_progress.png)\n\n\nNow you should have the imported project in your workspace. 
Mine is called\n[https://gitlab.com/awkwardferny/reddit-sentiment-analyzer](https://gitlab.com/awkwardferny/reddit-sentiment-analyzer).\nWhen examining the imported project, you can see the following:\n\n\n**Repository has been migrated**\n\n\n![Repository has been\nmigrated](https://about.gitlab.com/images/blogimages/2023-july-github-to-gitlab-migration/migrated_data.png)\n\n\n**Issue has been migrated**\n\n\n![Issue has been\nmigrated](https://about.gitlab.com/images/blogimages/2023-july-github-to-gitlab-migration/migrated_issue.png)\n\n\n**Merge request has been migrated**\n\n\n![Merge request has been\nmigrated](https://about.gitlab.com/images/blogimages/2023-july-github-to-gitlab-migration/migrated_merge_request.png)\n\n\n## Migrating GitHub Actions over to GitLab CI/CD\n\nNow that you have migrated the project over from GitHub, notice that none of\nthe GitHub Actions are running. Don't worry, they are very easy to migrate\nmanually. So let's start the migration process for Actions.\n\n\n1) Examine the GitHub Actions within the **.github/workflows** folder. In\nthe [project you just\nimported](https://gitlab.com/awkwardferny/reddit-sentiment-analyzer/-/tree/master/.github/workflows),\nyou should see three different Action files:\n\n\n#### lint.yml\n\nThis file contains the Action, which performs linting on the source code\nusing flake8. 
It uses the python:3.10 Docker image and installs the\napplication requirements before performing the lint.\n\n\n```yaml\n\nname: \"Lint\"\n\n\non:\n  push:\n    branches: [ master ]\n  pull_request:\n    branches: [ master ]\n\njobs:\n  lint:\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v3\n    - name: Set up Python 3.10\n      uses: actions/setup-python@v4\n      with:\n        python-version: \"3.10\"\n    - name: Install dependencies\n      run: |\n        python -m pip install --upgrade pip\n        pip install flake8 pytest\n        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi\n    - name: Lint with flake8\n      run: |\n        # stop the build if there are Python syntax errors or undefined names\n        flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics\n        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide\n        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics\n```\n\n\n#### smoke.yml\n\nThis file contains the action which performs a smoke test by just running\nthe CLI help menu. 
It uses the python:3.10 Docker image and installs the\napplication requirements before performing the smoke test.\n\n\n```yaml\n\nname: \"Smoke Tests\"\n\n\non:\n  push:\n    branches: [ master ]\n  pull_request:\n    branches: [ master ]\n\njobs:\n  smoke-tests:\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v3\n    - name: Set up Python 3.10\n      uses: actions/setup-python@v4\n      with:\n        python-version: \"3.10\"\n    - name: Install dependencies\n      run: |\n        python -m pip install --upgrade pip\n        pip install setuptools\n        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi\n    - name: Install Sentiment Analysis Application\n      run: |\n        python setup.py install\n    - name: Run smoke tests\n      run: |\n        reddit-sentiment --help\n```\n\n\n#### unit.yml\n\nThis file contains the Action, which performs unit tests using pytest. It\nuses the python:3.10 Docker image and installs the application requirements\nrunning the unit tests.\n\n\n```yaml\n\nname: \"Unit Tests\"\n\n\non:\n  push:\n    branches: [ master ]\n  pull_request:\n    branches: [ master ]\n\njobs:\n  unit-tests:\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v3\n    - name: Set up Python 3.10\n      uses: actions/setup-python@v4\n      with:\n        python-version: \"3.10\"\n    - name: Install dependencies\n      run: |\n        python -m pip install --upgrade pip\n        pip install pytest\n        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi\n    - name: Test with pytest\n      run: |\n        python -m pip install --upgrade pip\n        if [ -f test-requirements.txt ]; then pip install -r test-requirements.txt; fi\n        pytest tests/\n```\n\n\nNow let's go ahead and migrate these Actions over to GitLab.\n\n\n2) Go to the recently imported project on GitLab and open up the\n[WebIDE](https://docs.gitlab.com/ee/user/project/web_ide/).\n\n\n3) Create a file at 
the root called\n[**.gitlab-ci.yml**](https://docs.gitlab.com/ee/ci/yaml/gitlab_ci_yaml.html).\n\nThis file defines the GitLab pipeline.\n\n\n4) Add the following configuration, which will add the GitHub Actions as\nJobs in the GitLab pipeline. Notice the comments I added describing each\nsection.\n\n\n```yaml\n\n# This creates the stages in which the jobs will run. By default all\n\n# jobs will run in parallel in the stage. Once the jobs are completed\n\n# successfully then you move on to the next stage. The way jobs run\n\n# is completely configurable.\n\nstages:\n  - test\n\n# With the include statement, you can quickly add jobs which have\n\n# been pre-defined in external YAMLs. The SAST job I included below\n\n# is provided and maintained by GitLab and adds Static Application\n\n# Security Testing (SAST) to your pipeline.\n\ninclude:\n  - template: Jobs/SAST.gitlab-ci.yml\n\n# This is the unit test job which does exactly what is defined in\n\n# the GitHub Action in unit.yml. You can see it uses the python:3.10\n\n# Docker image, installs the application dependencies, and then runs\n\n# the unit tests with pytest. It was added with a simple copy and\n\n# paste and minor syntax changes.\n\nunit:\n  image: python:3.10\n  stage: test\n  before_script:\n    - python -m pip install --upgrade pip\n    - pip install pytest\n    - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi\n  script:\n    - pytest tests/\n\n# This is the lint job which does exactly what is defined in the\n\n# GitHub Action in lint.yml. You can see it uses the python:3.10\n\n# Docker image, installs the application dependencies, and then\n\n# performs the linting with flake8. 
It was added with a simple copy\n\n# and paste and minor syntax changes.\n\nlint:\n  image: python:3.10\n  stage: test\n  before_script:\n    - python -m pip install --upgrade pip\n    - pip install flake8\n    - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi\n  script:\n    - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics\n    - flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics\n\n# This is the smoke test job which does exactly what is defined in\n\n# the GitHub Action in smoke.yml. You can see it uses the python:3.10\n\n# Docker image, installs the application dependencies, and then runs\n\n# the smoke tests with the Reddit sentiment analysis CLI. It was\n\n# added with a simple copy and paste and minor syntax changes.\n\nsmoke:\n  image: python:3.10\n  stage: test\n  before_script:\n    - python -m pip install --upgrade pip\n    - pip install setuptools\n    - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi\n    - python setup.py install\n  script:\n    - reddit-sentiment --help\n```\n\n\nYou can see that scripts being executed in GitLab match those scripts within\nthe GitHub Actions. The only thing that has really changed is the syntax\nsetting up the jobs and stages. To learn more on how to create and configure\npipelines, check out the [GitLab CI/CD\ndocumentation](https://docs.gitlab.com/ee/ci/).\n\n\n5) Let's check in the code. From the WebIDE click on the Source Control Tab\nin the side panel of the WebIDE. It is the [third icon from the\ntop](https://code.visualstudio.com/docs/sourcecontrol/overview#_commit).\nThen press the **Commit to 'main'** button, select **Continue**, and voila,\nyou should now have a running pipeline.\n\n\n6) Examine the pipeline and make sure the jobs are running properly. Go back\nto your project and click on the\n[pipeline](https://docs.gitlab.com/ee/ci/pipelines/) icon. 
You can see the\nthe four jobs we created have run.\n\n\n![Four jobs have\nrun](https://about.gitlab.com/images/blogimages/2023-july-github-to-gitlab-migration/gitlab_jobs.png)\n\n\n7) Click on the **Unit** job and you can see that the unit tests were run\nsuccessfully.\n\n\n```bash\n\n$ pytest tests/\n\n============================= test session starts\n==============================\n\nplatform linux -- Python 3.10.11, pytest-7.3.1, pluggy-1.0.0\n\nrootdir: /builds/awkwardferny/reddit-sentiment-analyzer\n\ncollected 2 items\n\ntests/test_scraper.py ..                                                \n[100%]\n\n============================== 2 passed in 0.09s\n===============================\n\nCleaning up project directory and file based variables\n\n00:00\n\nJob succeeded\n\n```\n\n\nAnd that's how simple it is to migrate a project over from GitHub to GitLab!\n\n\n## What other platforms can GitLab import from?\n\nThe GitLab importer allows one-click migration from several other platforms.\nThese platforms include:\n\n* [Bitbucket\nCloud](https://docs.gitlab.com/ee/user/project/import/bitbucket.html)\n\n* [Bitbucket Server\n(Stash)](https://docs.gitlab.com/ee/user/project/import/bitbucket_server.html)\n\n* [FogBugz](https://docs.gitlab.com/ee/user/project/import/fogbugz.html)\n\n* [Gitea](https://docs.gitlab.com/ee/user/project/import/gitea.html)\n\n* [Repository by\nURL](https://docs.gitlab.com/ee/user/project/import/repo_by_url.html)\n\n* [Uploading a manifest file\n(AOSP)](https://docs.gitlab.com/ee/user/project/import/manifest.html)\n\n* [Jira (issues\nonly)](https://docs.gitlab.com/ee/user/project/import/jira.html)\n\n\nWe also have documentation covering how to migrate from these platforms:\n\n*\n[SVN](https://docs.gitlab.com/ee/user/project/import/#import-from-subversion)\n\n* [ClearCase](https://docs.gitlab.com/ee/user/project/import/clearcase.html)\n\n* [CVS](https://docs.gitlab.com/ee/user/project/import/cvs.html)\n\n* 
[Perforce](https://docs.gitlab.com/ee/user/project/import/perforce.html)\n\n* [TFVC](https://docs.gitlab.com/ee/user/project/import/tfvc.html)\n\n\n---\n\n\nThanks for reading! Now you know how easy it is to migrate from GitHub over\nto GitLab. For more information on GitLab\n\nand migrating from GitHub, follow the links below:\n\n\n* [GitHub-to-GitLab project migration\ndocumentation](https://docs.gitlab.com/ee/user/project/import/github.html)\n\n* [Available project\nimporters](https://docs.gitlab.com/ee/user/project/import/#available-project-importers)\n\n* [GitHub-to-GitLab migration video](https://youtu.be/0Id5oMl1Kqs)\n\n\nAlso, read how GitLab has been named a leader in the DevOps platforms space\nby\n[Gartner](https://about.gitlab.com/blog/gitlab-leader-gartner-magic-quadrant-devops-platforms/)\nand the integrated software delivery platforms space by\n[Forrester](https://about.gitlab.com/blog/gitlab-leader-forrester-wave-integrated-software-delivery-platforms/).\n\n\n_Cover image by [Julia\nCraice](https://unsplash.com/@jcraice?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)\non\n[Unsplash](https://unsplash.com/s/photos/migration?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)_\n",[109,719,951,9],{"slug":1854,"featured":6,"template":698},"github-to-gitlab-migration-made-easy","content:en-us:blog:github-to-gitlab-migration-made-easy.yml","Github To Gitlab Migration Made Easy","en-us/blog/github-to-gitlab-migration-made-easy.yml","en-us/blog/github-to-gitlab-migration-made-easy",{"_path":1860,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1861,"content":1867,"config":1873,"_id":1875,"_type":14,"title":1876,"_source":16,"_file":1877,"_stem":1878,"_extension":19},"/en-us/blog/gitlab-16-ai-and-security-take-center-stage",{"title":1862,"description":1863,"ogTitle":1862,"ogDescription":1863,"noIndex":6,"ogImage":1864,"ogUrl":1865,"ogSiteName":685,"ogType":686,"canonicalUrls":1865,"schema":1866},"GitLab 16: AI and 
security take center stage","Our GitLab 16 launch event showcased our AI-powered workflows that drive usability improvements, security enhancements, and observability advancements.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749671793/Blog/Hero%20Images/16_0-cover-image.png","https://about.gitlab.com/blog/gitlab-16-ai-and-security-take-center-stage","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab 16: AI and security take center stage\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"David DeSanto, Chief Product Officer, GitLab\"}],\n        \"datePublished\": \"2023-06-30\",\n      }",{"title":1862,"description":1863,"authors":1868,"heroImage":1864,"date":1870,"body":1871,"category":801,"tags":1872},[1869],"David DeSanto, Chief Product Officer, GitLab","2023-06-30","\nThe new era of DevSecOps is here – and its focus is on improving everyone’s experience through AI-powered workflows that drive usability improvements, security enhancements, and observability advancements.\n\nAt our recent GitLab 16 launch event, we highlighted how our platform has evolved to fuel productivity and efficiency, which are top of mind in 2023, according to GitLab’s [2023 Global DevSecOps Report: Productivity & Efficiency Within Reach](https://about.gitlab.com/developer-survey/).\n\n> If you missed the GitLab 16 event, it’s [available on demand](https://about.gitlab.com/sixteen/), so it’s not too late.\n\nWhat I find most exciting about the launch of [GitLab 16](https://about.gitlab.com/releases/2023/05/22/gitlab-16-0-released/) is that it marks a significant milestone for our customers, as well as for GitLab, and heralds the era of AI-powered DevSecOps. We’ve built upon the nearly 500 new capabilities we introduced with GitLab 15 and have continued that upward trajectory. 
\n\nThe GitLab 16 event showcased amazing new capabilities across our entire DevSecOps platform, which is reinforced by our significant investments in critical areas: \n\n1. Building a world-class DevSecOps experience that includes significant usability improvements, additional collaboration capabilities, and [AI-assisted workflows](https://about.gitlab.com/blog/ai-ml-in-devsecops-series/). \n2. Providing advanced security and compliance, deepening our capabilities, and bringing [software supply chain security](https://about.gitlab.com/blog/the-ultimate-guide-to-software-supply-chain-security/) to the forefront of software development.\n3. Bringing observability, analytics, and feedback into our DevSecOps platform, [empowering organizations to close the SDLC loop](https://docs.gitlab.com/ee/user/analytics/value_streams_dashboard.html) with user data.\n4. Offering GitLab for data science workloads to enable data scientists and data engineers to benefit from all the value our DevSecOps Platform provides, including collaboration, reproducibility, and streamlined deployment into production.\n\n## Introducing GitLab Duo\nAs part of GitLab 16, we [unveiled GitLab Duo](https://about.gitlab.com/blog/meet-gitlab-duo-the-suite-of-ai-capabilities/), our suite of enterprise-grade AI capabilities powering DevSecOps workflows. GitLab Duo is integrated directly into the DevSecOps platform, enabling you to write better code faster and more efficiently. And GitLab Duo goes well beyond code creation to deliver AI-assisted workflows at all stages of the software development lifecycle, such as security testing and analysis, observability, and proactive vulnerability detection. Our goal is to help you achieve a 10x improvement in workflow efficiency by tapping into all of the DevSecOps platform’s AI capabilities. 
\n\nGitLab Duo includes Code Suggestions, Explain this Code, Explain this Vulnerability, Summarize Issue Comments, Chat, and more.\n\nLearn about the [powerful features of GitLab Duo](https://about.gitlab.com/gitlab-duo/).\n\n## Joining forces with Google Cloud\nAt the launch June Yang, VP of Cloud AI and Industry Solutions at Google Cloud, discussed our partnership with [Google Cloud](https://about.gitlab.com/partners/technology-partners/google-cloud-platform/), (as recently [announced in May](https://about.gitlab.com/press/releases/2023-05-02-gitlab-and-google-cloud-partner-to-expand-ai-assisted-capabilities/)) in which we’re building several joint solutions that will help enterprise customers to improve the efficiency, effectiveness, and quality of their software development processes.\n\nBoth GitLab and Google Cloud are committed to developing new AI-powered solutions that help businesses improve their software development processes and protect their data.\n\nGitLab's vision for generative AI is grounded in privacy, security, and transparency. The partnership with Google Cloud enables GitLab to offer private and secure AI-powered features, while ensuring customer intellectual property (i.e., their source code) stays theirs and will not be used for training and fine-tuning of AI models. \n\n## CARFAX’s DevSecOps results\nMark Portofe, Director of Platform Engineering at CARFAX, also joined us for our GItLab 16 launch. It was enlightening to hear how CARFAX has been using GitLab since 2017 to make them more productive and more secure. Mark shared how CARFAX can now [create CI/CD pipelines](https://about.gitlab.com/solutions/continuous-integration/) in hours instead of the days or even weeks it took before – freeing up their developers’ time to focus on getting code to production. 
As a result, their number of production deployments has increased by 20% year over year.\n\n## GitLab Dedicated\nAs part of our GitLab 16 event, we also shared that [GitLab Dedicated](https://about.gitlab.com/dedicated/), our single-tenant SaaS offering of GitLab’s DevSecOps platform designed to address the needs of customers with stringent compliance requirements, is now generally available. \n\nWith GitLab Dedicated, organizations can access all of the benefits of the DevSecOps platform delivered as a SaaS offering – including faster releases, better security, and more productive developers – while satisfying compliance requirements such as data residency, isolation, and private networking.\n\n## Value Stream Analytics and Dashboards\nWhen it comes to observability, analytics, and feedback, our single application shines by providing end-to-end metrics and insights. We see this being very much native to our DevSecOps platform.\n\nWe’ve made great strides with [Value Streams Dashboards](https://www.youtube.com/watch?v=EA9Sbks27g4), a  popular feature with our customers. These dashboards combine DORA 4 metrics with GitLab-specific metrics to give organizations insights into the health of their software delivery, identifying areas of efficiency and areas for improvement. \n\nWe are also introducing [Product Analytics](https://about.gitlab.com/blog/introducing-product-analytics-in-gitlab/) in GitLab 16 to close the DevSecOps loop with user metrics and feedback from the applications that organizations are building with GitLab, which they can incorporate into their planning efforts. \n\n## Watch the GitLab 16 launch event\nI’m really proud of the work our teams put into GitLab 16 to make it a reality. 
To hear more and dig deeper into the amazing capabilities of GitLab 16, check out the [launch event](https://about.gitlab.com/sixteen/).\n",[803,9,781,693],{"slug":1874,"featured":6,"template":698},"gitlab-16-ai-and-security-take-center-stage","content:en-us:blog:gitlab-16-ai-and-security-take-center-stage.yml","Gitlab 16 Ai And Security Take Center Stage","en-us/blog/gitlab-16-ai-and-security-take-center-stage.yml","en-us/blog/gitlab-16-ai-and-security-take-center-stage",{"_path":1880,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1881,"content":1885,"config":1891,"_id":1893,"_type":14,"title":1894,"_source":16,"_file":1895,"_stem":1896,"_extension":19},"/en-us/blog/gitlab-18-3-expanding-ai-orchestration-in-software-engineering",{"config":1882,"title":1883,"description":1884},{"noIndex":6},"GitLab 18.3: Expanding AI orchestration in software engineering","Learn how we're advancing human-AI collaboration with enhanced Flows, enterprise governance, and seamless tool integration.",{"title":1883,"description":1884,"authors":1886,"heroImage":1888,"date":1561,"body":1889,"category":801,"tags":1890},[1887],"Bill Staples","https://res.cloudinary.com/about-gitlab-com/image/upload/v1755711502/wuuadis1pza3zehqohcc.png","Today, GitLab is a comprehensive DevSecOps platform, unifying every stage of the software lifecycle. Building on that foundation, we're on a journey toward becoming the world's first AI-native platform for software engineering. At GitLab, we believe the future of software engineering is an inherently human and AI collaboration, and we want to bring the very best AI capabilities to every GitLab user.  
\n\nThis transformation is happening at three distinct layers that go beyond what other AI dev tools are doing:\n\n![AI-native transformation slide visualizing what's laid out below](https://res.cloudinary.com/about-gitlab-com/image/upload/v1755762266/iwuugge3cxweiyvi0yjk.png)\n\n**First, we are a system of record.** Our unified data platform holds your most valuable digital assets. This includes your source code and intellectual property, as well as a wealth of unstructured data spanning project plans, bug backlogs, CI/CD configurations, deployment histories, security reports, and compliance data. This creates a treasure trove of contextual data that remains securely within your GitLab environment, unavailable to generic agents or large language models.\n\n**Second, we act as your software control plane.** We orchestrate your most critical business processes through Git repositories, REST APIs, and webhook-based interfaces that power your end-to-end software delivery. Many of our customers consider this a tier-0 dependency that their critical business processes rely on daily.\n\n**Third, we deliver a powerful user experience.** We deliver an integrated interface that helps eliminate the costly context-switching that slows down most engineering teams. With complete lifecycle visibility and collaboration tools in one platform, over 50 million registered users and our vast community depend on GitLab to get their work done. This expertise positions GitLab uniquely to pioneer intuitive human-to-AI collaboration that amplifies team productivity while preserving the workflows that our users know and trust.\n\n**Extending our platform with AI natively integrated at every layer**\n\n[GitLab Duo Agent Platform](https://about.gitlab.com/gitlab-duo/agent-platform/) integrates and extends all three of these layers. It is designed for extensibility and interoperability, enabling customers and partners to build solutions that create even more value. 
Our open platform approach emphasizes seamless connectivity with external AI tools and systems while being deeply integrated into our existing stack at all three layers.\n\n* First, we're extending our unified data platform with a **Knowledge Graph,** which indexes and stitches together code with all of the rest of your unstructured data, specifically optimized for agentic access. AI thrives on context, and we believe this will not only accelerate reasoning and inference by agents but also deliver lower-cost and higher-quality agentic outcomes.\n* Second, we're adding an important **Orchestration Layer** to our existing Control Plane in three distinct parts: enabling agents and flows to register as subscribers for GitLab SDLC events, building a new orchestration engine that allows for purpose-built, multi-agent flows, and exposing GitLab tools, agents, and flows via MCP and standard protocols for unparalleled interoperability.\n* Finally, we're extending the **GitLab experience** to deliver first-class agents and agent flows across the entire software development lifecycle. You'll be able to assign async tasks to agents, @ mention them in comments, and create custom agents with context specific to your workflows — but more importantly, GitLab is shipping native agents for every stage of development while unlocking a rich ecosystem of third-party agents. 
This creates true human-to-AI collaboration where agents become as natural to work with as your human teammates.\n\nWatch this video to see what's coming in 18.3 and beyond, or read on.\n\n\u003Cdiv style=\"padding:75% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1111796316?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media; web-share\" referrerpolicy=\"strict-origin-when-cross-origin\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"GitLab_18.3 Release_081925_MP_v1\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n## What's new in GitLab 18.3\n\nWith 18.2, we introduced specialized [AI agents](https://about.gitlab.com/blog/gitlab-duo-agent-platform-public-beta/#agents-that-work-out-of-the-box:~:text=Agents%20that%20work%20out%20of%20the%20box) that work alongside developers across the software development lifecycle, plus our [Software Development Flow](\u003Chttps://about.gitlab.com/blog/gitlab-duo-agent-platform-public-beta/#agents-that-work-out-of-the-box:~:text=we%20are%20building%3A-,Software%20Development%20Flow,-(now%20in%20beta>) — a powerful feature that gives users the ability to orchestrate multiple agents to plan, implement, and test code changes end-to-end.\n\nGitLab 18.3 introduces expanded integrations and interoperability, more Flows, and enhanced context awareness across the entire software development lifecycle.\n\n### Expanded integrations and interoperability\n\nWe're delivering comprehensive AI extensibility through both first-party GitLab agents and a rich ecosystem of third-party agents, all with full access to project context and data. 
This approach maintains native GitLab workflows and governance while providing the flexibility to choose preferred tools through highly integrated orchestration between these agents and GitLab's core platform. Teams gain enhanced AI functionality while preserving key integration, oversight, and user experience benefits.\n\n* **MCP server - Universal AI integration:** GitLab's MCP ([Model Context Protocol](https://about.gitlab.com/topics/ai/model-context-protocol/)) server enables AI systems to securely integrate directly with your GitLab projects and development processes. This standardized interface eliminates custom integration overhead and allows your AI tools — including [Cursor](https://docs.cursor.com/en/tools/mcp) — to work intelligently within your existing GitLab environment. See our [docs](https://docs.gitlab.com/user/gitlab_duo/model_context_protocol/mcp_server/) for a full list of tools included with 18.3. **This is only the start; additional tools are planned for 18.4.**\n> *“Bringing GitLab workflows directly into Cursor is a critical step in reducing friction for developers. By minimizing the need for context switching, teams can check issue status, review merge requests, and monitor pipeline results without ever leaving their coding environment. This integration is a natural fit for our shared customers, and we look forward to a long-term partnership with GitLab to continue enhancing developer productivity.”*\n>\n> \\- **Ricky Doar, VP of Field Engineering at Cursor**\n>\n> *“GitLab's MCP server and CLI agent support create powerful new ways for Amazon Q to integrate with development workflows. Amazon Q Developer can now connect directly through GitLab's remote MCP interface, while teams can delegate development tasks by simply @ mentioning Amazon Q CLI in issues and merge requests. 
The robust security and governance capabilities built into these integrations give enterprises the confidence to leverage AI coding tools while preserving their development standards. Our partnership with GitLab demonstrates AWS' ongoing commitment to expanding our AI ecosystem and making intelligent development tools accessible wherever developers work.\"*\n>\n> \\- **Deepak Singh, Vice President of Developer Agents and Experiences at AWS**\n\n* **CLI agent support for Claude Code, Codex, Amazon Q, Google Gemini, and opencode (Bring Your Own Key):** 18.3 introduces integrations that enable teams to delegate routine development work by @ mentioning their agents directly in issues or merge requests. When developers mention these AI assistants, they automatically read the surrounding context and repository code, then respond to the user's comment with either ready-to-review code changes or inline comments. These integrations require you to bring your own API key for the respective AI providers and keep all interactions natively within GitLab's interface while maintaining proper permissions and audit trails.\n\n\n  **Note:** Third-party agents is a GitLab Premium Beta feature and only available to GitLab Duo Enterprise customers for evaluation.  \n\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1111784124?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media; web-share\" referrerpolicy=\"strict-origin-when-cross-origin\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Third Party Agents Flows Claude Code\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n> *“Bringing Claude Code directly into GitLab puts AI assistance where millions of developers already collaborate and ship code daily. 
The ability to mention Claude directly in issues and merge requests removes friction while maintaining quality with human oversight and review processes. This update brings Claude Code's capabilities to more places where teams work, making AI a natural part of their developer workflow.”*\n>\n> **\\- Cat Wu, Claude Code Product Lead, Anthropic**\n>\n> *“With GitLab's new agent integration in 18.3 you can use opencode within your existing workflows. You can @mention opencode in an issue or merge request and it'll run your agent right in your CI pipeline. This ability to configure and run opencode the way you want is the type of integration we know the open source community really values.”*\n>\n> **\\- Jay V., CEO, opencode**\n\n* **Agentic Chat support for Visual Studio IDE and GitLab UI available to all Premium and Ultimate customers:** With 18.3, you no longer need to context-switch between tools to access GitLab's full development lifecycle data. Our enhanced integrations bring the complete power of GitLab Duo into the GitLab UI as well as IDEs — expanding support from JetBrains and VS Code to now include Visual Studio. This helps developers stay in flow while accessing rich project context, deployment history, and team collaboration data directly within their preferred environment.\n* **Expanded AI model support:** GitLab Duo Self-Hosted now supports additional AI models, giving teams more flexibility in their AI-supported development workflows. You can now deploy open source OpenAI GPT models (20B and 120B parameters) through vLLM on your datacenter hardware, or through cloud services like Azure OpenAI and AWS Bedrock in your private cloud. 
Additionally, Anthropic's Claude 4 is available on AWS Bedrock.\n\n### New automated development flows\n\nGitLab Flows coordinate multiple AI agents with pre-built instructions to autonomously handle those time-consuming, mundane tasks so developers can focus on the work that matters most.\n\nGitLab 18.3 comes with two new Flows:\n\n* **Issue to MR Flow enabling automated code generation from concept to completion in minutes:** This Flow automatically converts issues into actionable merge requests (MRs) by coordinating agents to analyze requirements, prepare comprehensive implementation plans, and generate production-grade code that's ready for review — helping you turn ideas into reviewable implementations in minutes, not hours.\n\n\u003Cdiv style=\"padding:75% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1111782058?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media; web-share\" referrerpolicy=\"strict-origin-when-cross-origin\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Issue to MR\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n* **Convert CI File Flow built for seamless migration intelligence:** Our Convert CI File Flow streamlines migration workflows by having agents analyze existing CI/CD configurations and intelligently convert them to GitLab CI format with full pipeline compatibility. This helps eliminate the manual effort and potential errors of rewriting CI configurations from scratch, enabling teams to migrate entire deployment pipelines with confidence. 18.3 includes support for Jenkins migrations. 
Additional support is planned for future releases.\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1111783724?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media; web-share\" referrerpolicy=\"strict-origin-when-cross-origin\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Convert to CI Flow\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n### Intelligent code and search\n\nAI point solutions typically operate with limited visibility into isolated code snippets, but GitLab's Knowledge Graph provides agents with environment context to help inform faster and more intelligent responses.\n\n* **Knowledge Graph for real-time code intelligence:** With 18.3, GitLab's Knowledge Graph now delivers real-time code indexing to enable faster code searches, delivering more accurate and contextual results. 
By understanding the relationships between files, dependencies, and development patterns across your entire codebase, our agents are designed to provide insights that would take human developers hours to uncover — **and this is just the first step in unlocking the powerful capabilities that are planned for Knowledge Graph.**\n\n### Enterprise governance\n\nAI transparency and organizational control are critical challenges that can hold teams back from fully adopting AI-powered development tools, with [85% of executives agreeing that agentic AI will create unprecedented security challenges](https://about.gitlab.com/software-innovation-report/).\n\nThese new features in 18.3 help address concerns around data governance, compliance requirements, and the need for visibility into AI decision-making processes so organizations can integrate AI within their existing security and policy frameworks.\n\n* **Agent Insights for transparency through intelligence:** Our built-in agent tracking provides visibility into agent decision-making processes. 
Users can optimize workflows and follow best practices through transparent activity tracking.\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1111783244?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media; web-share\" referrerpolicy=\"strict-origin-when-cross-origin\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Agent Insights\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\u003Cp>\u003C/p>\n\n* **GitLab Duo Code Review for Self-Hosted:** This brings the intelligence of GitLab Duo to organizations with strict data governance requirements by allowing teams to keep sensitive code in controlled environments.\n* **Hybrid model configurations for flexible AI deployment:** GitLab Duo Self-Hosted customers can now use hybrid model configurations, combining self-hosted AI models via their local AI gateway with GitLab's cloud models through GitLab's AI gateway, enabling access to various features.\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1111783569?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media; web-share\" referrerpolicy=\"strict-origin-when-cross-origin\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Self Hosted Models Code Review\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\u003Cp>\u003C/p>\n\n* **Enhanced security with OAuth support:** Our MCP server now includes full OAuth 2.0 authentication support, enabling secure connections to protected resources and sensitive development environments. 
This implementation follows the draft OAuth specification for MCP, handling authorization flows, token management, and dynamic client registration. \n\n### Secure by Design platform: Governance that scales\n\nTrue platform security requires consistent application of governance principles across every layer of the development lifecycle. The same security fundamentals that make AI adoption safe — least-privilege access, centralized policy management, proactive monitoring, and granular permissions — must be embedded throughout the entire SDLC to create a cohesive, defense-in-depth approach.\n\nGitLab 18.3 strengthens the foundational controls that help protect your entire software supply chain with these new updates:\n\n* **Custom admin role:** Provides granular, purpose-built administrative permissions, replacing blanket admin access with precise, least-privilege controls. Instead of granting blanket administrative privileges that create security risks, organizations can now create specialized roles tailored to specific functions — platform teams managing runners and monitoring, support teams handling user management, and leadership accessing dashboards and usage statistics. With complete role lifecycle management through UI and API, audit logging, and auto-generated documentation, this feature enables true least-privilege administration while helping maintain operational efficiency and improve overall instance security.\n* **Instance-level compliance framework and security policy management**: Organizations can now designate a dedicated compliance group that has the authority to apply standardized frameworks and security policies directly to top-level groups, automatically cascading enforcement to all their subgroups and projects. 
This centralized approach eliminates the compliance adoption blocker of fragmented policy management while maintaining group autonomy for additional local policies.\n* **Enhanced violations reporting:** Teams now receive immediate notifications when unauthorized changes are made to MR approval rules, framework policies lack proper approvals, or time-based compliance controls are violated. By directly linking violations to specific compliance framework controls, teams get actionable insights that tell them exactly which requirement was breached, turning compliance from a reactive checkbox exercise into a proactive, integrated part of the development and security workflow.\n* **Fine-grained permissions for CI/CD job tokens:** Replaces broad token access with granular, explicit permissions that grant CI/CD jobs access only to specific API endpoints they actually need. Instead of allowing jobs blanket access to project resources, teams can now define precise permissions for deployments, packages, releases, environments, and other critical resources, reducing the attack surface and potential for privilege escalation.\n* **AWS Secrets Manager integration:** Teams using AWS Secrets Manager can now retrieve secrets directly in GitLab CI/CD jobs, simplifying the build and deploy processes. Secrets are accessed by a GitLab Runner using OpenID Connect protocol-based authentication, masked to prevent exposure in job logs, and destroyed after use. This approach eliminates the need to store secrets in variables and integrates cleanly into existing GitLab and AWS-based workflows. Developed in close collaboration with Deutsche Bahn and the AWS Secrets Manager team, this integration reflects our commitment to building solutions alongside customers to solve real-world challenges.\n\n### Artifact management: Securing your software supply chain\n\nWhen artifacts aren't properly governed, small changes can have big consequences. 
Mutable packages, overwritten container images, and inconsistent rules across tools can trigger production outages, introduce vulnerabilities, and create compliance gaps. For enterprise DevSecOps, secure, centralized artifact management is essential for keeping the software supply chain intact.\n\n#### Enterprise-grade artifact protection in 18.3\n\nBuilding on our comprehensive package protection capabilities, GitLab 18.3 adds important new features:\n\n* **Conan revisions support:** New in 18.3, [Conan revisions](https://docs.gitlab.com/user/packages/conan_2_repository/#conan-revisions) provide package immutability for C++ developers. When changes are made to a package without changing its version, Conan calculates unique identifiers to track these changes, enabling teams to maintain immutable packages while preserving version clarity.\n* **Enhanced Container Registry security:** Following the successful launch of [immutable container tags](https://docs.gitlab.com/user/packages/container_registry/immutable_container_tags/) in 18.2, we're seeing strong enterprise adoption. 
Once a tag is created that matches an immutable rule, no one — regardless of permission level — can modify that container image, preventing unintended changes to production dependencies.\n\nThese enhancements complement our existing protection capabilities for npm, PyPI, Maven, NuGet, Helm charts, and generic packages, enabling platform teams to implement consistent governance across their entire software supply chain — a requirement for organizations building secure internal developer platforms.\n\nUnlike standalone artifact solutions, GitLab's integrated approach eliminates context switching between tools while providing end-to-end traceability from code to deployment, enabling platform teams to implement consistent governance across their entire software delivery pipeline.\n\n### Embedded views: Real-time visibility and reports\n\nAs GitLab projects grow in complexity, teams find themselves navigating between issues, merge requests, epics, and milestones to maintain visibility into work status. 
The challenge lies in consolidating this information efficiently while ensuring teams have real-time access to project progress without context switching or breaking their flow.\n**Launching real-time work status visibility in 18.3**\nGitLab 18.3's [embedded views, powered by the GitLab Query Language](https://docs.gitlab.com/user/glql/#embedded-views) (GLQL), eliminate context switching by bringing live project data directly into your workflow:\n* **Dynamic views:** Insert live GLQL queries in Markdown code blocks throughout wiki pages, epics, issues, and merge requests that automatically refresh with current project states each time you load the page.\n* **Contextual personalization:** Views automatically adapt using functions like `currentUser()` and `today()` to show relevant information for whoever is viewing, without manual configuration.\n* **Powerful filtering:** Filter by 25+ fields, including assignee, author, label, milestone, health status, and creation date.\n* **Display flexibility:** Present data as tables, lists, or numbered lists with customizable field selection, item limits, and sort orders to keep your views focused and actionable.\n\nUnlike fragmented project management approaches, we've designed embedded views to maintain your workflow continuity while providing real-time visibility, enabling teams to make informed decisions without losing focus or switching between multiple tools and interfaces.\n\n> Learn about the [newest features in GitLab 18.3](https://about.gitlab.com/releases/2025/08/21/gitlab-18-3-released/).\n## Get started today\nGitLab 18.3 is available now for GitLab Premium and Ultimate users on GitLab.com and self-managed environments.\n\nGitLab Dedicated customers are now upgraded to 18.2 and will be able to use the features released with GitLab 18.3 next month.\n\nReady to experience the future of software engineering? [Enable beta and experimental features for GitLab 
Duo](https://docs.gitlab.com/user/gitlab_duo/turn_on_off/#turn-on-beta-and-experimental-features) and start collaborating with AI agents that understand your complete development context.\n\nNew to GitLab? [Start your free trial](https://gitlab.com/-/trials/new) today and discover why the future of software engineering is human and AI collaboration, orchestrated through the world's most comprehensive DevSecOps platform.\n\n\u003Cp>\u003Csmall>\u003Cem>This blog post contains “forward-looking statements” within the meaning of Section 27A of the Securities Act of 1933, as amended, and Section 21E of the Securities Exchange Act of 1934. Although we believe that the expectations reflected in the forward-looking statements contained in this blog post are reasonable, they are subject to known and unknown risks, uncertainties, assumptions and other factors that may cause actual results or outcomes to be materially different from any future results or outcomes expressed or implied by the forward-looking statements.\u003C/em>\u003C/p>\n\u003Cp>\u003Cem>Further information on risks, uncertainties, and other factors that could cause actual outcomes and results to differ materially from those included in or contemplated by the forward-looking statements contained in this blog post are included under the caption “Risk Factors” and elsewhere in the filings and reports we make with the Securities and Exchange Commission. 
We do not undertake any obligation to update or release any revisions to any forward-looking statement or to report any events or circumstances after the date of this blog post or to reflect the occurrence of unanticipated events, except as required by law.\u003C/em>\u003C/small>\u003C/p>",[717,803,9,695,693],{"featured":91,"template":698,"slug":1892},"gitlab-18-3-expanding-ai-orchestration-in-software-engineering","content:en-us:blog:gitlab-18-3-expanding-ai-orchestration-in-software-engineering.yml","Gitlab 18 3 Expanding Ai Orchestration In Software Engineering","en-us/blog/gitlab-18-3-expanding-ai-orchestration-in-software-engineering.yml","en-us/blog/gitlab-18-3-expanding-ai-orchestration-in-software-engineering",{"_path":1898,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1899,"content":1903,"config":1909,"_id":1911,"_type":14,"title":1912,"_source":16,"_file":1913,"_stem":1914,"_extension":19},"/en-us/blog/gitlab-18-4-ai-native-development-with-automation-and-insight",{"config":1900,"title":1901,"description":1902},{"noIndex":6},"GitLab 18.4: AI-native development with automation and insight","With GitLab 18.4, teams create custom agents, unlock Knowledge Graph context, and auto-fix pipelines so developers stay focused and in flow.",{"title":1901,"description":1902,"authors":1904,"heroImage":1905,"date":1906,"body":1907,"category":801,"tags":1908},[1887],"https://res.cloudinary.com/about-gitlab-com/image/upload/v1758541195/kig7sww6jyvxzmkmimbv.png","2025-09-23","As a developer, you know modern development isn't just about writing code — it's about managing change across the entire software development lifecycle. \n\nIn [GitLab 18.3](https://about.gitlab.com/blog/gitlab-18-3-expanding-ai-orchestration-in-software-engineering/), we laid the groundwork for true human-AI collaboration. 
We introduced leading AI tools such as Claude Code, Codex CLI, Amazon Q CLI, and Gemini CLI as native integrations to GitLab, delivered our first preview of the GitLab Model Context Protocol ([MCP](https://about.gitlab.com/topics/ai/model-context-protocol/)) server in partnership with Cursor, and shipped two new flows, Issue to MR and Convert CI File for Jenkins Flows, to help teams tackle everyday problems. \n\nWith [GitLab 18.4](https://about.gitlab.com/releases/2025/09/18/gitlab-18-4-released/) we are expanding your ability to build and share custom agents, collaborate more effectively through Agentic Chat, navigate codebases with the Knowledge Graph, and keep pipelines green with the Fix Failed Pipelines Flow, while also delivering greater security and governance over your AI usage. \n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1120293274?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media; web-share\" referrerpolicy=\"strict-origin-when-cross-origin\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"18.4 Release video placeholder\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n\n> Have questions on the latest features in the GitLab 18.4 release? [Join us for The Developer Show](https://www.linkedin.com/events/q-a-code-exploringgitlab18-4and7373772262312906753/theater/) live on LinkedIn on Sept. 23 at 10:00 am PT, or on-demand shortly after! 
\n\n## Build your experience\n\n*Start your day by pulling from the AI Catalog — a library of specialized agents that surface priorities, automate routine work, and keep you focused on building.*\n\n### AI Catalog as your library of specialized agents (Experimental)\n\nWith GitLab 18.4, we're introducing the GitLab Duo AI Catalog — a central library where teams can create, share, and collaborate with custom-built agents across their organization. Every team has ‘their way' of doing things. So creating a custom agent is just like training a fellow engineer on the ‘right way' to do things in your organization.  \n\nFor example, a custom Product Planning agent can file bugs in the specific format, following your labeling standards, or a Technical Writer agent can draft concise documentation following your conventions, or a Security agent can make sure your security and compliance standards are met for every MR. Instead of functioning as disconnected tools, these agents become part of the natural stream of work inside GitLab — helping accelerate tasks without disrupting established processes.\n\n**Note:** This capability is currently only available on GitLab.com as an Experiment. We plan to deliver this to our self-managed customers next month in the 18.5 release. \n\n## Stay in your flow\n\n*GitLab Duo Agentic Chat makes collaboration with agents seamless.*\n\n### Smarter Agentic Chat to streamline collaboration with agents (Beta)\n\nAs the centerpiece of GitLab Duo Agent Platform (Beta), [Agentic Chat](https://docs.gitlab.com/user/gitlab_duo_chat/agentic_chat/) gives you a seamless way to collaborate with AI agents. The latest update to Agentic Chat with GitLab 18.4 improves the chat experience and expands how sessions are managed and surfaced.\n\n* **Chat with custom agent**\n\n  Let's start with your newly-created custom agent. Once designed, you can immediately put that agent to work through Agentic Chat. 
For example, you could ask your new agent “give me a list of assignments” to get started with your priorities for the day. Additionally, you now have the ability to start fresh conversations with new agents and resume previous conversations with agents without losing context. \n\n* [**User model selection**](https://docs.gitlab.com/user/gitlab_duo/model_selection/#select-a-model-to-use-in-gitlab-duo-agentic-chat)\n\n  With previous releases, you were able to select models at a namespace level, but in 18.4 you can now choose models at the user level for a given chat session. This empowers you to make the call on which LLM is right for the job, or experiment with different LLMs to see which delivers the best answer for your task. \n\n* **Improved formatting and visual design**\n\n  We hope you love the new visual design for GitLab Duo Agentic Chat, including improved handling of tool call approvals to ensure your experience is more enjoyable.  \n\n* **Agent Sessions available through Agentic Chat**\n\n  Sessions are expanding to become a core part of the Agentic Chat experience. Any agent run or flow now appears in the Sessions overview available from Agentic Chat. Within each session, you'll see rich details like job logs, user information, and tool metadata — providing critical transparency into how agents are working on your behalf.  \n  \n  \n  **Note:** Sessions in Agentic Chat is available on GitLab.com only; this enhancement is planned for self-managed customers next month in the 18.5 update.\n\n## Unlock your codebase\n\n*With agents, context is king. With Knowledge Graph, you can give your agents more context so they can reason faster and give you better results.* \n\n### Introducing the GitLab Knowledge Graph (Beta)\n\nThe [GitLab Knowledge Graph](https://gitlab-org.gitlab.io/rust/knowledge-graph/) in 18.4 transforms how developers and agents understand and navigate complex codebases. 
The Knowledge Graph provides a connected map of your entire project, linking files, routes, and references across the software development lifecycle. By leveraging tools such as go-to-definition, codebase search, and reference tracking through in-chat queries, developers gain the ability to ask precise questions like “show me all route files” or “what else does this change impact?”\nThis deeper context helps teams move faster and with more confidence — whether it's onboarding new contributors, conducting deep research across a project, or exploring how a modification impacts dependent code. The more of your ecosystem that lives in GitLab, the more powerful the Knowledge Graph becomes, giving both humans and AI agents the foundation to build with accuracy, speed, and full project awareness. In future releases, we'll be stitching all of your GitLab data into the Knowledge Graph, including plans, MRs, security vulnerabilities, and more.\nThis release of the Knowledge Graph focuses on local code indexing, where the `gkg` CLI turns your codebase into a live, embeddable graph database for RAG. You can install it with a simple one-line script, parse local repositories, and connect via MCP to query your workspace. 
 \nOur vision for the Knowledge Graph project is twofold: building a vibrant community edition that developers can run locally today, which will serve as the foundation for a future, fully-integrated Knowledge Graph Service within GitLab.com and self-managed instances.\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1121017374?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media; web-share\" referrerpolicy=\"strict-origin-when-cross-origin\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"18.4 Knowledge Graph Demo\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n## Automate your pipeline maintenance\n\n*Fix pipeline failures faster and stay in the flow with the Fix Failed Pipelines Flow.*\n\n### Fix Failed Pipelines Flow with business awareness\n\nKeeping pipelines green is critical for your development velocity, but traditional approaches focus only on technical troubleshooting without considering the business impact. The **Fix Failed Pipelines Flow** addresses this challenge by combining technical analysis with strategic context. For example, it can automatically prioritize fixing a failed deployment pipeline for a customer-facing service ahead of a nightly test job, or flag build issues in a high-priority release branch differently than experimental feature branches.\n\n* **Business-aware failure detection** monitors pipeline executions while understanding the importance of different workflows and deployment targets.  \n* **Contextual root cause analysis** analyzes failure logs alongside business requirements, recent changes, and cross-project dependencies to identify underlying causes.  
\n* **Strategic fix prioritization** generates appropriate fixes while considering business impact, deadlines, and resource allocation priorities.  \n* **Workflow-integrated resolution** automatically creates merge requests with fixes that maintain proper review processes while providing business context for prioritization decisions.\n\nThis flow keeps pipelines green while maintaining strategic alignment, enabling automated fixes to support business objectives rather than just resolving technical issues in isolation.\n\n## Customize your AI environment\n\n*Automation only works if you trust the models behind it. That's why 18.4 delivers governance features like model selection and GitLab-managed keys.*\n\n### GitLab Duo model selection to optimize feature performance\n\n[Model selection](https://docs.gitlab.com/user/gitlab_duo/model_selection/) is now generally available, giving you direct control over which large language models ([LLMs](https://about.gitlab.com/blog/what-is-a-large-language-model-llm/)) power GitLab Duo. You and your team can select the models of your choice, apply them across the organization or tailor them per feature. You can set defaults to ensure consistency across namespaces and tools, with governance, compliance, and security requirements in mind. \n\nFor customers using GitLab Duo Self-Hosted, newly added support for GPT OSS and GPT-5 provides additional flexibility for AI-powered development workflows.\n\n**Note:** GitLab Duo Self-Hosted is not available to GitLab.com customers, and GPT models are not supported on GitLab.com. \n\n## Protect your sensitive context\n\n*Alongside governance comes data protection, giving you fine-grained control over what AI can and can't see.*\n\n### GitLab Duo Context Exclusion for granular data protection\n\nIt's no surprise — you need granular control over what information AI agents can access. 
**GitLab Duo Context Exclusion** in 18.4 provides project-level settings that let teams exclude specific files or file paths from AI access. Capabilities include:\n\n* **File-specific exclusions** to help protect sensitive files such as password configurations, secrets, and proprietary algorithms.  \n* **Path-based rules** to create exclusion patterns based on directory structures or file naming conventions.  \n* **Flexible configuration** to apply exclusions at the project level while maintaining development workflow efficiency.  \n* **Audit visibility** to track what content is excluded to support compliance with data governance policies.\n\nGitLab Duo Context Exclusion helps you protect sensitive data while you accelerate development with agentic AI.\n\n## Extend your AI capabilities with new MCP tools\n\n*Expanded MCP tools extend those capabilities even further, connecting your GitLab environment with a broader ecosystem of intelligent agents.*\n\n### New tools for GitLab MCP server\n\nExpanding on the initial MCP server introduced in [18.3](https://about.gitlab.com/blog/gitlab-18-3-expanding-ai-orchestration-in-software-engineering/), GitLab 18.4 adds more MCP tools — capabilities that define how MCP clients interact with GitLab. These new tools extend integration possibilities, enabling both first-party and third-party AI agents to take on richer tasks such as accessing project data, performing code operations, or searching across repositories, all while respecting existing security and permissions models. 
For a full list of MCP tools, including the new additions in 18.4, visit our [MCP server documentation](https://docs.gitlab.com/user/gitlab_duo/model_context_protocol/mcp_server/).\n\n## Experience the future of intelligent software development\n\nWith [GitLab Duo Agent Platform](https://about.gitlab.com/gitlab-duo/agent-platform/), engineers can begin to move from working on one issue at a time in single threaded fashion, to multi-threaded collaboration with asynchronous agents that act like teammates to get work done, faster. We are bringing to market this unique vision with our customers' preferences for independence and choice: run in your preferred cloud environments using the LLMs and AI tools that work best for you, within the security and compliance guardrails you set. \n\nAs an integral part of this innovation, GitLab 18.4 is more than a software upgrade — it's about making the day-to-day experience of developers smoother, smarter, and more secure. From reusable agents to business-aware pipeline fixes, every feature is designed to keep teams in flow while balancing speed, security, and control. 
For a deeper look at how these capabilities come together in practice, check out our walkthrough video.\n\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1120288083?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media; web-share\" referrerpolicy=\"strict-origin-when-cross-origin\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"A day in the life with GitLab Duo Agent Platform\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\u003Cp>\u003C/p>\n\nGitLab Premium and Ultimate users can start using these capabilities today on [GitLab.com](https://GitLab.com) and self-managed environments, with availability for [GitLab Dedicated](https://about.gitlab.com/dedicated/) customers coming next month. \n\n> **Enable beta and experimental features in GitLab Duo Agent Platform today** and experience how full-context AI can transform the way your teams build software. New to GitLab? [Start your free trial](https://about.gitlab.com/free-trial/devsecops/) and see why the future of development is AI-powered, secure, and orchestrated through the world's most comprehensive DevSecOps platform.\n\n## Stay up to date with GitLab\n\nTo make sure you're getting the latest features, security updates, and performance improvements, we recommend keeping your GitLab instance up to date. 
The following resources can help you plan and complete your upgrade:\n\n* [Upgrade Path Tool](https://gitlab-com.gitlab.io/support/toolbox/upgrade-path/) – enter your current version and see the exact upgrade steps for your instance  \n* [Upgrade documentation](https://docs.gitlab.com/update/upgrade_paths/) – detailed guides for each supported version, including requirements, step-by-step instructions, and best practices\n\nBy upgrading regularly, you'll ensure your team benefits from the newest GitLab capabilities and remains secure and supported.\n\nFor organizations that want a hands-off approach, consider [GitLab's Managed Maintenance service](https://content.gitlab.com/viewer/d1fe944dddb06394e6187f0028f010ad#1). With Managed Maintenance, your team stays focused on innovation while GitLab experts keep your Self-Managed instance reliably upgraded, secure, and ready to lead in DevSecOps. Ask your account manager for more information. \n\n\n*This blog post contains \"forward-looking statements\" within the meaning of Section 27A of the Securities Act of 1933, as amended, and Section 21E of the Securities Exchange Act of 1934. Although we believe that the expectations reflected in these statements are reasonable, they are subject to known and unknown risks, uncertainties, assumptions and other factors that may cause actual results or outcomes to differ materially. Further information on these risks and other factors is included under the caption \"Risk Factors\" in our filings with the SEC. 
We do not undertake any obligation to update or revise these statements after the date of this blog post, except as required by law.*",[803,717,695,9],{"featured":91,"template":698,"slug":1910},"gitlab-18-4-ai-native-development-with-automation-and-insight","content:en-us:blog:gitlab-18-4-ai-native-development-with-automation-and-insight.yml","Gitlab 18 4 Ai Native Development With Automation And Insight","en-us/blog/gitlab-18-4-ai-native-development-with-automation-and-insight.yml","en-us/blog/gitlab-18-4-ai-native-development-with-automation-and-insight",{"_path":1916,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1917,"content":1923,"config":1929,"_id":1931,"_type":14,"title":1932,"_source":16,"_file":1933,"_stem":1934,"_extension":19},"/en-us/blog/gitlab-achieves-pci-dss-attestation-of-compliance",{"title":1918,"description":1919,"ogTitle":1918,"ogDescription":1919,"noIndex":6,"ogImage":1920,"ogUrl":1921,"ogSiteName":685,"ogType":686,"canonicalUrls":1921,"schema":1922},"GitLab achieves PCI DSS Attestation of Compliance","Learn how our completion of the AoC as a Level 1 Service Provider, along with our broader security credentials,  helps us support customers'  compliance efforts.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749673615/Blog/Hero%20Images/blog-image-template-1800x945__4_.png","https://about.gitlab.com/blog/gitlab-achieves-pci-dss-attestation-of-compliance","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab achieves PCI DSS Attestation of Compliance\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Sasha Gazlay\"}],\n        \"datePublished\": \"2025-03-17\",\n      }",{"title":1918,"description":1919,"authors":1924,"heroImage":1920,"date":1926,"body":1927,"category":779,"tags":1928},[1925],"Sasha Gazlay","2025-03-17","Compliance with the [Payment Card Industry (PCI) Data Security Standard 
(DSS)](https://www.pcisecuritystandards.org/) helps to alleviate security vulnerabilities and protect cardholder data. The PCI standard is required of any enterprise handling credit card and related authentication data or whose services could impact the security of the cardholder data environment. GitLab, as the most comprehensive AI-powered DevSecOps platform, provides the tools and resources to support our customers’ security posture, including those handling PCI-relevant data.\n\nGitLab is pleased to announce the successful achievement of a PCI DSS Attestation of Compliance (AoC) as a Level 1 Service Provider. The AoC comes after an independent Qualified Security Assessor-led validation of our alignment to the PCI DSS. The attestation, in addition to GitLab’s broader compliance credentials, demonstrates the dedication to our mission of being the leading example in security, innovation, and [transparency](https://handbook.gitlab.com/handbook/values/#transparency) in our information security practices.\n\nPlease visit GitLab’s [Trust Center](https://about.gitlab.com/security/) to view our AoC as well as details on shared responsibilities when relying on our attestation. 
Also, check out [our PCI compliance page](https://about.gitlab.com/compliance/pci-compliance/), which explains how GitLab can support your compliance efforts.",[9,693,1133],{"slug":1930,"featured":6,"template":698},"gitlab-achieves-pci-dss-attestation-of-compliance","content:en-us:blog:gitlab-achieves-pci-dss-attestation-of-compliance.yml","Gitlab Achieves Pci Dss Attestation Of Compliance","en-us/blog/gitlab-achieves-pci-dss-attestation-of-compliance.yml","en-us/blog/gitlab-achieves-pci-dss-attestation-of-compliance",{"_path":1936,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1937,"content":1943,"config":1948,"_id":1950,"_type":14,"title":1951,"_source":16,"_file":1952,"_stem":1953,"_extension":19},"/en-us/blog/gitlab-at-aws-re-invent-2023",{"title":1938,"description":1939,"ogTitle":1938,"ogDescription":1939,"noIndex":6,"ogImage":1940,"ogUrl":1941,"ogSiteName":685,"ogType":686,"canonicalUrls":1941,"schema":1942},"GitLab at AWS re:Invent 2023","GitLab and AWS have streamlined development and security for DevSecOps teams. Learn how in lightning talks, sessions, live demos, and more.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749664472/Blog/Hero%20Images/gitlabflatlogomap.png","https://about.gitlab.com/blog/gitlab-at-aws-re-invent-2023","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab at AWS re:Invent 2023\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Darwin Sanoy\"}],\n        \"datePublished\": \"2023-11-22\",\n      }",{"title":1938,"description":1939,"authors":1944,"heroImage":1940,"date":1945,"body":1946,"category":1277,"tags":1947},[714],"2023-11-22","GitLab will be at AWS re:Invent 2023 in Las Vegas, November 27 to December 1, to demonstrate how the GitLab DevSecOps Platform on Amazon Web Services delivers secure, enterprise-grade AI throughout the software development lifecycle. 
Stop by Booth #1152 in the Security Zone for [lightning talks, live demos, customer sessions, and more](https://about.gitlab.com/events/aws-reinvent/) all week. \n\nMake sure to [check out our event page and calendar](https://about.gitlab.com/events/aws-reinvent/) to find sessions, locations, opportunities to meet with GitLab, and more (note, they do not appear in the AWS event app). Some sessions will also be available on-demand after the conference.\n\nHere are some of the lightning talks GitLab will be presenting:\n\n**Frictionless developer experience: Using human habits to accelerate DevSecOps maturity and increase joy**\n\nGitLab’s long-standing approach to building DevSecOps pipelines aligns with AWS’ new emphasis on frictionless developer experiences. Join this session to learn how the GitLab DevSecOps platform represents a true “shift left” by empowering and streamlining developers’ normal workflow.\n\n[Add to calendar - Nov. 30](https://content.gitlab.com/viewer/65412018ca9e0b9d4b50acb2?iid=654966e4f2269af78f005ba1)\n\n**New integrations and solutions for using GitLab and AWS together**\n\nIn recent months, AWS and GitLab have built new service integrations for source control, CI, and CD. You'll learn how GitLab integrates with AWS CodeStar Connections, Amazon CodeGuru, OpenID, and more, as well as development and deployment solutions for Serverless.com Framework and Terraform to AWS.\n\nAdd to calendar\n* [Nov. 28](https://content.gitlab.com/viewer/65412018ca9e0b9d4b50acb2?iid=654144eef011a50313dc7113)\n* [Nov. 29](https://content.gitlab.com/viewer/65412018ca9e0b9d4b50acb2?iid=654942dfef8fa23b213f0eca)\n* [Nov. 
30](https://content.gitlab.com/viewer/65412018ca9e0b9d4b50acb2?iid=65494b66a0b8daf9ca33a386)\n\n**Secure and assured Terraform development using GitLab security scanning policies and managed DevOps environments**\n\nThis lightning talk discusses and demonstrates working example code that extends GitLab's existing support for Terraform State management with full lifecycle-managed DevOps environments for merge requests, long-lived pre-production environments, production environments, and one-off experimental environments. Whether you are developing infrastructure as code specifically or embedding it with application code for the sake of easy environment support, this lightning talk has something to offer you.\n\n[Add to calendar - Nov. 28](https://content.gitlab.com/viewer/65412018ca9e0b9d4b50acb2?iid=654961043165b6f013635639)\n\n**Secure GitLab CD pipelines to AWS with OpenID Federation, OIDC, and JWT**\n\nGitLab has three ways to authenticate and authorize your CI and CD workloads into AWS environments. Adding and refining OpenID provides the ability to use an industry standard, which is the most advanced of the three. Join us to learn how to accomplish this highly secure integration option.\n\n[Add to calendar - Nov. 29](https://content.gitlab.com/viewer/65412018ca9e0b9d4b50acb2?iid=6549580763edc0caa46ea061)\n\n**Security intelligence through full integration of Amazon CodeGuru Security into GitLab**\n\nAWS CodeGuru Security has created a full integration that enables you to view scanner results in GitLab merge requests and security dashboards so you can use them to block merges in security policy merge approval rules — just like GitLab’s integrated security scanning results. Attend this lightning talk to learn more.\n\n[Add to calendar - Nov. 
28](https://content.gitlab.com/viewer/65412018ca9e0b9d4b50acb2?iid=654953f963edc0cdbf6e8c6f)\n\n## GitLab and AWS: The year in review\nThroughout 2023, GitLab and AWS announced partner designations and new service integrations that enable development, security, and operations teams to collaborate more easily, to take advantage of AI at all stages, and to flexibly scale infrastructure to create and deploy secure software faster. \n\n#### AWS recognized GitLab as a partner in several categories\n\n- **AWS DevSecOps Partner Competency Specialty:** This specialty denotes that GitLab makes it easy for customers to [integrate security across every stage](https://about.gitlab.com/blog/aws-devsecops-competency-partner/) of the development and delivery cycles, providing rapid and contextual feedback to development, security, and ops teams.\n\n-  **Amazon Linux 2023 Ready Partner:** Amazon Linux 2023-specific RPM packages are available for GitLab, starting at [Version 16.3.0](https://docs.gitlab.com/ee/administration/package_information/supported_os.html) and for GitLab Runner. Official GitLab support for Amazon Linux 2023 also means GitLab builds the RPM packages and hosts them on our packages infrastructure, Graviton (arm64) and amd64 architectures are both supported. To install GitLab on Amazon Linux 2023, [follow these instructions](https://about.gitlab.com/install/#amazonlinux-2023). \n\nLearn more about [GitLab's AWS partner designations](https://docs.gitlab.com/ee/solutions/cloud/aws/gitlab_aws_partner_designations.html).\n\n#### AWS CodeStar Connections opens up a host of AWS service integrations\n\nAWS recently completed the integration of GitLab.com SaaS into its AWS CodeStar Connections service. This service is a foundational, shared service used by many other AWS services to connect to Git repositories outside of AWS. 
As a result, GitLab was immediately available to AWS services once this integration was completed.\n\nGitLab is available at CodeStar Connections throughout many AWS services for connectivity to Git. In addition, using a CodeStar Connection for an AWS CodePipeline opens up other service integrations that primarily rely on CodePipeline as their key integration point.\n\nHere is a visual map of the integrations that are currently available:\n\n![CodeStar Connections integrations](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749676883/Blog/Content%20Images/gitlabcodestarconnectionsintegration.png)\n\n#### AI customization with AWS CodeWhisperer\n[AWS CodeWhisperer's customization capability](https://aws.amazon.com/blogs/aws/new-customization-capability-in-amazon-codewhisperer-generates-even-better-suggestions-preview/) leverages CodeStar Connections, allowing generative code suggestions to take into account the libraries and design patterns of your current application when suggesting new code. It does so with no ingestion of your code into the general LLM creation. AWS CodeWhisperer can be pointed to a GitLab repository. \n\n#### AWS CodeGuru and GitLab Ultimate secure scanning integration\nThe AWS CodeGuru team [built an integration with GitLab CI](https://docs.gitlab.com/ee/solutions/cloud/aws/gitlab_aws_integration.html#scm-integrations) as part of their build secure scanning capabilities. [Amazon CodeGuru Security findings](https://docs.aws.amazon.com/codeguru/latest/security-ug/get-started-gitlab.html) use GitLab’s vulnerability report formatting, enabling exports to integrate directly into GitLab Ultimate security features such as merge request views, security dashboards, and in-context remediation solutions and training. Importantly, it allows these findings to be addressed by GitLab Security Policy Merge Approval Rules. 
\n\n#### GitLab's new single-tenant SaaS option sits atop AWS\nEarlier this year, GitLab launched [GitLab Dedicated](https://docs.gitlab.com/ee/subscriptions/gitlab_dedicated/), a single-tenancy solution for organizations in highly regulated industries that have complex regulatory, compliance, and data residency requirements. The fully isolated SaaS offering is hosted and managed by GitLab and deployed on AWS in a cloud region of the customer's choosing. [Learn more about how GitLab built GitLab Dedicated](https://about.gitlab.com/blog/building-gitlab-with-gitlabcom-how-gitlab-inspired-dedicated/).\n\n## Plan your week at AWS re:Invent\nFill your calendar with GitLab at AWS re:Invent! [Check out our calendar](https://about.gitlab.com/events/aws-reinvent/) of sponsored sessions, lightning talks, live demos, and more throughout the week at Booth #1152.\n",[805,951,9,284],{"slug":1949,"featured":91,"template":698},"gitlab-at-aws-re-invent-2023","content:en-us:blog:gitlab-at-aws-re-invent-2023.yml","Gitlab At Aws Re Invent 2023","en-us/blog/gitlab-at-aws-re-invent-2023.yml","en-us/blog/gitlab-at-aws-re-invent-2023",{"_path":1955,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1956,"content":1962,"config":1969,"_id":1971,"_type":14,"title":1972,"_source":16,"_file":1973,"_stem":1974,"_extension":19},"/en-us/blog/gitlab-at-next-25-transforming-app-modernization",{"title":1957,"description":1958,"ogTitle":1957,"ogDescription":1958,"noIndex":6,"ogImage":1959,"ogUrl":1960,"ogSiteName":685,"ogType":686,"canonicalUrls":1960,"schema":1961},"GitLab at Next '25: Transforming app modernization","GitLab participated in Google Cloud Next ‘25 and received a fifth consecutive Google Cloud Technology Partner of the Year recognition.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663121/Blog/Hero%20Images/LogoLockupPlusLight.png","https://about.gitlab.com/blog/gitlab-at-next-25-transforming-app-modernization","\n                        {\n        \"@context\": 
\"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab at Next '25: Transforming app modernization\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Regnard Raquedan\"}],\n        \"datePublished\": \"2025-04-11\",\n      }",{"title":1957,"description":1958,"authors":1963,"heroImage":1959,"date":1965,"body":1966,"category":1133,"tags":1967},[1964],"Regnard Raquedan","2025-04-11","GitLab's presence at Google Cloud Next '25 highlighted our strong partnership with Google Cloud and our joint commitment to accelerating software development and delivery. We were recognized again as a Technology Partner of the Year, and included in key enterprise initiatives like Google Distributed Cloud (GDC) Build Partners and [Startup Perks from Google Cloud](https://cloud.google.com/blog/topics/startups/why-global-startups-are-gathering-at-google-cloud-next25?e=13802955). Our team members demonstrated for attendees how GitLab is positioned to be a critical DevSecOps service for Google Cloud customers.\n\n## Continuing our award-winning partnership excellence\n\n\u003Cimg src=\"https://res.cloudinary.com/about-gitlab-com/image/upload/v1752175937/Blog/nempa4yvfutedz3fpuxx.jpg\" alt=\"GitLab team at Google Cloud Next '25\" align=\"left\" width=\"400px\" style=\"padding-right: 20px; padding-bottom: 10px\"/>\n\nWe're thrilled to announce that GitLab has once again been named a [Google Cloud Technology Partner of the Year award winner](https://about.gitlab.com/press/releases/2025-04-08-gitlab-wins-a-google-cloud-technology-partner-of-the-year-award-for-devops/), marking our fifth consecutive time receiving this prestigious honor. This remarkable achievement reaffirms our position as Google Cloud's primary DevOps partner, consistently delivering exceptional value year after year. 
The continued recognition highlights how our collaboration with Google Cloud creates tangible business outcomes for customers, enabling organizations across industries to build, secure, and deploy applications with efficiency and confidence.\n\n## Google Distributed Cloud: DevSecOps for highly regulated environments\n\nAnother significant milestone announced at Next '25 was GitLab's \"Google Cloud Ready - Distributed Cloud\" certification. This designation enables organizations to implement GitLab in air-gapped environments, addressing critical security and compliance requirements.\n\nAs an end-to-end DevSecOps solution available on Google Distributed Cloud, GitLab enables sovereign development and operations for workloads critical to national security and regulatory compliance. This integration is particularly valuable for government agencies and financial institutions that require the highest levels of data sovereignty while maintaining modern development practices.\n\n## GitLab perks for Google Startups\n\nGitLab is a Featured Partner of the new Startup Perks program from Google Cloud. This partnership ties up with our own [GitLab for Startups](https://about.gitlab.com/solutions/startups/google-cloud/) and is meant to jumpstart new tech ventures with key DevSecOps capabilities that can help with fast growth and scaling.\n\nAs one of the [Featured Perks partners](https://cloud.google.com/startup/perks), eligible startups can get free or discounted access to one year of [GitLab Ultimate](https://about.gitlab.com/pricing/ultimate/) for 20 licenses. 
For seed or early stage startups, this benefit can help ensure collaboration, efficiency, and security without sacrificing speed and agility.\n\n## Thoughts from the dais\n\nGitLab experts shared valuable insights across multiple speaking sessions at Next '25, delivering practical knowledge on AI-powered DevSecOps, platform engineering, and cloud application delivery:\n\n* __[AI DevOps panel](https://cloud.withgoogle.com/next/25/session-library?session=BRK2-163&utm_source=copylink&utm_medium=unpaidsoc&utm_campaign=FY25-Q2-global-EXP106-physicalevent-er-next25-mc&utm_content=reg-is-live-next-homepage-social-share&utm_term=-):__ Mike Flouton, GitLab Vice President of Product Management, joined industry leaders to discuss how AI code assist tools boost productivity while enhancing application performance.\n\n* __[Software Logistics - The Missing Link in Modern Platform Engineering](https://cloud.withgoogle.com/next/25/session-library?session=CT2-16&utm_source=copylink&utm_medium=unpaidsoc&utm_campaign=FY25-Q2-global-EXP106-physicalevent-er-next25-mc&utm_content=reg-is-live-next-homepage-social-share&utm_term=-):__ GitLab Field CTO Lee Faus explored how effective software logistics create the foundation for successful platform engineering initiatives.\n\n* __[Revolutionizing Cloud Application Delivery with Intelligent Agents](https://cloud.withgoogle.com/next/25/session-library?session=CT2-17&utm_source=copylink&utm_medium=unpaidsoc&utm_campaign=FY25-Q2-global-EXP106-physicalevent-er-next25-mc&utm_content=reg-is-live-next-homepage-social-share&utm_term=-):__ Faus also demonstrated how intelligent agents are transforming cloud application delivery pipelines.\n\n## Engaging attendees across Next '25\n\nIn addition to our speaking sessions, GitLab maintained a strong presence throughout Next '25. 
At our booth #2170 on the expo floor, our team engaged with hundreds of attendees through demonstrations and lightning talks featuring both GitLab experts and partners like Arctiq and SADA.\n\nThe Google Cloud Makerspace's Dev Tools Pantry became a hub of innovation and collaboration. John Coghlan, Director of Developer Advocacy, observed: \"It was great to connect with many GitLab and Google Cloud customers in the Dev Tools Pantry in the Makerspace. We loved seeing the creative solutions that people came up with around developer experience and simplified deployments using GitLab and Google Cloud as their ingredients.\"\n\nThese hands-on experiences showcased how GitLab's DevSecOps solutions integrate well with Google Cloud services, with our AI-powered capabilities demonstrations drawing particular interest from attendees looking to enhance developer productivity and application security.\n\n## GitLab and Google Cloud: Transforming the future together\n\nThe energy witnessed at Next '25 exemplifies why GitLab and Google Cloud make such powerful partners. Together, we help organizations to transform how they build, secure, and deploy applications through:\n\n* AI-assisted development capabilities and collaborative workflows that can help accelerate innovation in Google Cloud environments\n\n* Shift-left security approach that integrates with Google Cloud's security-first architecture to identify vulnerabilities early in the development lifecycle\n\n* Flexible deployment options and comprehensive observability that work harmoniously with Google Cloud infrastructure to help streamline operations\n\nAs demonstrated at Next '25, the GitLab and Google Cloud partnership delivers tangible advantages for development teams facing real-world challenges – whether accelerating AI adoption, strengthening security in regulated environments, or streamlining complex deployment pipelines. 
The technical integration points and customer success stories shared throughout the event underscore that this collaboration continues to produce practical solutions that matter.\n\n> #### Discover how GitLab and Google Cloud can transform your application development experience at [GitLab's Google Cloud partnership page](https://about.gitlab.com/partners/technology-partners/google-cloud-platform/).",[1968,9,279,284,1133],"google",{"slug":1970,"featured":6,"template":698},"gitlab-at-next-25-transforming-app-modernization","content:en-us:blog:gitlab-at-next-25-transforming-app-modernization.yml","Gitlab At Next 25 Transforming App Modernization","en-us/blog/gitlab-at-next-25-transforming-app-modernization.yml","en-us/blog/gitlab-at-next-25-transforming-app-modernization",{"_path":1976,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1977,"content":1983,"config":1991,"_id":1993,"_type":14,"title":1994,"_source":16,"_file":1995,"_stem":1996,"_extension":19},"/en-us/blog/gitlab-dark-mode-is-getting-a-new-look",{"title":1978,"description":1979,"ogTitle":1978,"ogDescription":1979,"noIndex":6,"ogImage":1980,"ogUrl":1981,"ogSiteName":685,"ogType":686,"canonicalUrls":1981,"schema":1982},"GitLab dark mode is getting a new look","GitLab is enhancing dark mode for a cleaner, more polished experience, with incremental updates to improve usability and visual consistency. 
Get a sneak peek at this new design.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098523/Blog/Hero%20Images/Blog/Hero%20Images/hero%20%282%29_7nhIrZ08jWcLxhaH9rfbk1_1750098523498.png","https://about.gitlab.com/blog/gitlab-dark-mode-is-getting-a-new-look","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab dark mode is getting a new look\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Sascha Eggenberger\"},{\"@type\":\"Person\",\"name\":\"Chris Micek\"},{\"@type\":\"Person\",\"name\":\"Jeremy Elder\"}],\n        \"datePublished\": \"2024-10-15\",\n      }",{"title":1978,"description":1979,"authors":1984,"heroImage":1980,"date":1988,"body":1989,"category":717,"tags":1990},[1985,1986,1987],"Sascha Eggenberger","Chris Micek","Jeremy Elder","2024-10-15","Dark mode has become an essential feature, providing a darker background with lighter content to reduce eye strain, enhance readability, and maintain continuity with system-wide settings. While we currently offer an experimental version of dark mode in GitLab, customers requested some improvements. We’ve taken that feedback seriously and now we’re excited to share our vision for the future of dark mode on the DevSecOps platform, and how we plan to roll this out over the next several months.\n\n## Challenges with the current dark mode\n\nGitLab’s dark mode, launched in 2020, has remained in an alpha state, largely due to its initial approach of algorithmically inverting colors. While this method allowed us to quickly offer a basic dark background with lighter text, there are several issues that require taking a different approach.\n\nThe current dark mode suffers from inconsistent visual hierarchy — some elements, like alerts, stand out too much, while others fade into the background. 
An overuse of color also makes certain elements, such as alerts and badges, overly saturated, distracting from key content. Additionally, some elements emit too much light, causing visual strain for those using GitLab for long periods of time.\n\nDue to these issues and the complexity of implementing fixes, our experimental dark mode has remained below our standard of quality, with many one-off adjustments adding unnecessary complexity. We know there’s room for improvement, and that’s why we’re committed to enhancing this experience in a way that feels seamless, comfortable, and visually appealing.\n\n## Principles guiding the new direction\n\nTo create a more cohesive dark mode, we’ve developed several design principles that will guide the new iterations. Similar to our work on a [Dark UI for GitLab’s Web IDE](https://about.gitlab.com/blog/creating-a-dark-ui-for-gitlabs-web-ide/), these principles and ideas helped us focus on ensuring the experience is not only aesthetically pleasing but also functional and accessible.\n\nTo see how these principles are applied in practice with examples watch this [walkthrough by Jeremy Elder](https://www.youtube.com/watch?v=QdiV6lRSFpE), a staff product designer working on dark mode.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n\u003Ciframe src=\"https://www.youtube.com/embed/QdiV6lRSFpE?si=kFssresabK0JJrug\" title=\"GitLab Dark Mode\" frameborder=\"0\" allowfullscreen=\"true\">\u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n### 1. Forward elements are lighter, receding ones are darker\n\nThis mimics natural light behavior: brighter elements come forward, while darker ones recede. 
In dark mode, brighter elements create depth, ensuring important content stands out without relying heavily on borders or shadows.\n\n![Using surfaces to make important components stand out more.](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098534/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750098533970.png)\n\u003Ccenter>\u003Ci>Using surfaces to make important components stand out more.\u003C/i>\u003C/center>\n\u003Cbr>\n\n![Applying the new design principles can create a visually better structured and more meaningful UI.](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098534/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750098533970.png)\n\n\u003Ccenter>\u003Ci>Applying the new design principles can create a visually better structured and more meaningful UI.\u003C/i>\u003C/center>\n\n### 2. Reduced color saturation\nIn a dark UI, color naturally stands out more, so we’re reducing the amount of color used. Instead of flooding backgrounds with color, we’re using color more selectively to draw attention where it’s needed.\n\n![Alerts are quieter than before.](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098534/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750098533971.png)\n\n\u003Ccenter>\u003Ci>Alerts are quieter than before.\u003C/i>\u003C/center>\n\n### 3. Dimmed, not inverted\n\nWe’re approaching dark mode as “dimming the lights” rather than fully inverting the interface. 
This means we’re making careful decisions about which elements to darken and which to brighten, so the content remains clear while the background recedes appropriately.\n\n![Dark mode subtly dims the background while keeping key content clear and visible instead of inverting colors.](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098534/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098533971.png)\n\n\u003Ccenter>\u003Ci>Dark mode subtly dims the background while keeping key content clear and visible instead of inverting colors.\u003C/i>\u003C/center>\n\n## Iterative implementation\n\nThe new GitLab dark mode won’t be rolled out all at once. Instead, we’re taking an iterative approach, releasing updates incrementally. This process will begin with elements of the Pajamas Design System and gradually expand across the rest of the product. If you’re using the current dark mode, you’ll start to notice subtle changes to colors, contrast, typography, and component styling, all working towards our vision of a more polished, cohesive dark mode. 
\n\n> Follow our [progress in the dark mode epic](https://gitlab.com/groups/gitlab-org/-/epics/2902) as we continue working towards this vision.\n\n## Read more\n-  [Get to know the new GitLab typefaces](https://about.gitlab.com/blog/new-typefaces-in-gitlab/)\n- [Beautifying our UI: Giving GitLab build features a fresh look](https://about.gitlab.com/blog/beautifying-of-our-ui/)\n- [How visualization improves the GitLab merge train experience](https://about.gitlab.com/blog/how-visualization-improves-the-gitlab-merge-train-experience/)\n",[1031,1032,717,9],{"slug":1992,"featured":91,"template":698},"gitlab-dark-mode-is-getting-a-new-look","content:en-us:blog:gitlab-dark-mode-is-getting-a-new-look.yml","Gitlab Dark Mode Is Getting A New Look","en-us/blog/gitlab-dark-mode-is-getting-a-new-look.yml","en-us/blog/gitlab-dark-mode-is-getting-a-new-look",{"_path":1998,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":1999,"content":2005,"config":2011,"_id":2013,"_type":14,"title":2014,"_source":16,"_file":2015,"_stem":2016,"_extension":19},"/en-us/blog/gitlab-dedicated-for-government-now-fedramp-authorized",{"title":2000,"description":2001,"ogTitle":2000,"ogDescription":2001,"noIndex":6,"ogImage":2002,"ogUrl":2003,"ogSiteName":685,"ogType":686,"canonicalUrls":2003,"schema":2004},"GitLab Dedicated for Government now FedRAMP-authorized","Learn how our single-tenant SaaS solution empowers public sector customers to securely accelerate their modernization initiatives.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662023/Blog/Hero%20Images/display-dedicated-for-government-article-image-0679-1800x945-fy26.png","https://about.gitlab.com/blog/gitlab-dedicated-for-government-now-fedramp-authorized","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab Dedicated for Government now FedRAMP-authorized\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Deepa 
Mahalingam\"},{\"@type\":\"Person\",\"name\":\"Elisabeth Burrows\"}],\n        \"datePublished\": \"2025-05-19\",\n      }",{"title":2000,"description":2001,"authors":2006,"heroImage":2002,"date":2008,"body":2009,"category":717,"tags":2010},[2007,844],"Deepa Mahalingam","2025-05-19","We're excited to announce that GitLab Dedicated for Government has achieved FedRAMP Authorization at the Moderate Impact Level, marking a significant milestone in our commitment to serving public sector organizations. GitLab Dedicated for Government is now listed as \"Authorized\" on the [FedRAMP Marketplace](https://marketplace.fedramp.gov/products/FR2411959145). Our single-tenant solution provides all the benefits of an enterprise DevSecOps platform with enhanced data residency, isolation, and private networking capabilities to meet the most stringent compliance requirements. GitLab Dedicated for Government provides the flexibility and scalability of a SaaS solution, enabling government agencies to modernize and secure their software supply chain from code to cloud. \n\nThe [Federal Risk and Authorization Management Program](https://www.fedramp.gov/) (FedRAMP) is the gold standard for cloud security across US government agencies. As a mandatory security framework for federal cloud adoption since its 2011 launch, it provides a standardized approach to security assessment, authorization, and continuous monitoring for cloud products and services. \n\n## Meeting the growing demand for secure cloud solutions\n\nAs more public sector organizations move away from costly legacy systems and migrate their mission-critical workloads to the cloud, cloud-native development and multi-cloud adoption will grow significantly. At GitLab, we serve a wide variety of customers in the public sector – from federally-funded research and development centers, service providers, and contractors working on behalf of the government, to the largest government agencies across the globe. 
We understand that no single deployment model will serve the needs of all of our customers. \n\nOur customers have told us they need a SaaS offering that provides additional deployment control and data residency to meet stringent compliance requirements. We see this need with large enterprises and companies in regulated industries that are coming under increased scrutiny, facing global internet policy fragmentation, and dealing with the expanding complexity of data governance. GitLab has consistently observed that security is a top priority for organizations and our [2024 Global DevSecOps Survey](https://about.gitlab.com/developer-survey/) showed that this trend continued, with security remaining the primary investment area.\n\nGitLab Dedicated for Government was specifically designed to address these needs – enabling organizations to accelerate their digital transformation initiatives with confidence.\n\n## Key benefits of GitLab Dedicated for Government\n\n**1. Toolchain consolidation**\n\nToolchain management continues to be a significant challenge for DevSecOps teams. According to our 2024 Global DevSecOps Survey, 64% of respondents expressed the need to consolidate their toolchains, with security professionals particularly affected – 63% reported using six or more tools.\n\nThis proliferation of tools results in unnecessary expenditure and introduces complexities and vulnerabilities that increase the risk of cyber attacks. GitLab Dedicated for Government unites DevSecOps teams on a single platform with a unified workflow, eliminating the need to purchase or maintain multiple tools. Organizations can strengthen security, improve efficiency, and accelerate collaboration by consolidating their complex toolchains. Additionally, consolidation supports zero trust architecture implementation by centralizing access control, making it easier to enforce consistent security policies and authentication requirements across the entire development lifecycle. 
GitLab also enables flexibility by allowing you to utilize existing critical tools through our integration capabilities.\n\n**2. Data residency and protection**\n\nGitLab Dedicated for Government is built on FedRAMP-authorized infrastructure that meets U.S. data sovereignty requirements, including access restricted to U.S. citizens. To further enhance data protection, our solution supports secure, private connections between the customer's virtual private cloud network and GitLab, ensuring users, data, and services have secure access to isolated instances without direct internet exposure. All data is [encrypted at rest](https://docs.gitlab.com/subscriptions/gitlab\\_dedicated/\\#data-encryption) and in transit using the latest encryption standards, with the option to use your own AWS Key Management Service encryption key for data at rest, giving you full control over stored data. GitLab Dedicated for Government also ensures CVEs are patched continuously. It is an ideal platform for teams to build a centralized DevSecOps platform while offboarding compliance burdens to GitLab.\n\n**3. Managed and hosted by GitLab**\n\nOur solution is single-tenant (providing physical isolation from other customers), U.S.-based, privately connected, and fully managed and hosted by GitLab. Organizations can quickly realize the value of a comprehensive DevSecOps platform with the advanced flexibility and customization of a self-managed instance – without requiring staff to build and manage infrastructure.\n\nThis approach delivers all the benefits of GitLab – shorter cycle times, lower costs, stronger security, and more productive developers – with a lower total cost of ownership and quicker time-to-value compared to self-hosting.\n\n**4. 
Comprehensive native security capabilities**\n\nOur 2024 Global DevSecOps Survey revealed that 60% of public sector security professionals report vulnerabilities are mostly discovered after code is merged into test environments, and only 51% consider their DevSecOps practices mature and well-ingrained. GitLab's comprehensive security scanning capabilities, built into the DevSecOps platform,  provide superior control and protection throughout the entire software development lifecycle, helping public sector organizations address these issues. These features eliminate the need for third-party security tools that could potentially compromise compliance. \n\nFor example, organizations gain access to a complete suite of native security scanners including API Security, Container Scanning, Dynamic Application Security Testing, and Fuzz Testing. This integrated approach ensures federal security standards are met without disrupting development workflows.\n\nWith the GitLab DevSecOps unified platform, public sector organizations avoid the painful scenario of discovering security limitations mid-implementation and having to choose between compromising on security features or implementing non-compliant solutions. \n\n## How to get started with GitLab Dedicated for Government\n\nGitLab Dedicated for Government provides the efficiencies of the cloud combined with infrastructure-level isolation and data residency controls. To learn more about how GitLab Dedicated for Government can help secure your software supply chain, reach out to our [sales team](https://about.gitlab.com/sales/). Whether you are a new customer or looking to migrate from your existing GitLab instance, we will ensure a smooth transition with comprehensive [migration support](https://about.gitlab.com/services/) tailored to your needs. 
\n\n**Note:** GitLab has also achieved the [Texas Risk and Authorization Management Program Certification](https://dir.texas.gov/resource-library-item/tx-ramp-certified-cloud-products) (TX_RAMP), which allows us to work with Texas state agencies.",[9,695,1133,717,186],{"slug":2012,"featured":91,"template":698},"gitlab-dedicated-for-government-now-fedramp-authorized","content:en-us:blog:gitlab-dedicated-for-government-now-fedramp-authorized.yml","Gitlab Dedicated For Government Now Fedramp Authorized","en-us/blog/gitlab-dedicated-for-government-now-fedramp-authorized.yml","en-us/blog/gitlab-dedicated-for-government-now-fedramp-authorized",{"_path":2018,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2019,"content":2025,"config":2030,"_id":2032,"_type":14,"title":2033,"_source":16,"_file":2034,"_stem":2035,"_extension":19},"/en-us/blog/gitlab-duo-amazon-q-transform-ideas-into-code-in-minutes",{"title":2020,"description":2021,"ogTitle":2020,"ogDescription":2021,"noIndex":6,"ogImage":2022,"ogUrl":2023,"ogSiteName":685,"ogType":686,"canonicalUrls":2023,"schema":2024},"GitLab Duo + Amazon Q: Transform ideas into code in minutes","The new GitLab Duo with Amazon Q integration analyzes your issue descriptions and automatically generates complete working code solutions, accelerating development workflows.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097127/Blog/Hero%20Images/Blog/Hero%20Images/Screenshot%202024-11-27%20at%204.55.28%E2%80%AFPM_4VVz6DgGBOvbGY8BUmd068_1750097126673.png","https://about.gitlab.com/blog/gitlab-duo-amazon-q-transform-ideas-into-code-in-minutes","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab Duo + Amazon Q: Transform ideas into code in minutes\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Cesar Saavedra\"}],\n        \"datePublished\": \"2025-04-28\",\n      
}",{"title":2020,"description":2021,"authors":2026,"heroImage":2022,"date":2027,"body":2028,"category":801,"tags":2029},[798],"2025-04-28","Have you ever spent days or even weeks converting a complex issue into working code? We've all been there. You start with a solid idea and a clear set of requirements, but the path from that initial concept to deployable code can be frustratingly long. Your productivity gets bogged down in implementation details, and projects that should move quickly end up dragging on.\n\nThis is where the power of [agentic AI](https://about.gitlab.com/topics/agentic-ai/) capabilities comes in. [GitLab Duo with Amazon Q](https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws/), which combines the comprehensive AI-powered DevSecOps platform with the deepest set of cloud computing capabilities, is designed to dramatically accelerate your application development process, all within your familiar GitLab workflow. By streamlining your path from idea to deployment, this powerful integration can propose implementation solutions based on your issue descriptions alone – transforming what used to take days into something that happens in minutes.\n\n## How it works: From issue to working code\n\nLet's walk through how this agentic AI feature works in practice. Imagine you're a developer tasked with creating a mortgage calculator application. Here's how GitLab Duo with Amazon Q helps you get it done:\n\n1. **Create an issue with detailed requirements:** Start by creating a standard [GitLab issue](https://docs.gitlab.com/user/project/issues/). In the description, you'll provide a comprehensive list of requirements that your service needs to meet. This becomes the blueprint for your solution.\n\n2. **Invoke Amazon Q with a quick action:** Once your issue is created, simply add a comment with a quick action, “/q dev”, to invoke Amazon Q. This is where the magic begins. \n\n3. 
**Let AI generate your implementation:** GitLab Duo with Amazon Q analyzes the issue description you've provided and the context of your source code, then autonomously generates code that meets all your stated requirements. It doesn't stop there – it actually commits those changes in a merge request, ready for your review.\n\n![GitLab Duo  with Amazon Q activity pop-up screenshot](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097156/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097156018.png)\n\n4. **Review the generated application**: Navigate into the merge request to review the generated code. You can verify that all your requirements have been met and make any necessary adjustments.\n\n5. **Test the proposed application**: Finally, check that the application runs successfully. With minimal effort on your part, you now have working code that implements your original requirements.\n\n## Improve your development process\n\nGitLab Duo with Amazon Q completely transforms this process, including dramatically decreasing the time it takes to carry out complex developer tasks, through intelligent automation. By leveraging an agentic AI approach, you can accelerate your path from idea to deployment, freeing development teams to focus on more strategic work.\n\nWith GitLab Duo and Amazon Q, you'll develop software faster, more efficiently, and with less manual coding effort. This integration helps you:\n\n* **Save valuable development time** by automating implementation based on requirements  \n* **Maintain consistency** in code generation across your projects  \n* **Reduce the cognitive load** of translating requirements into working code  \n* **Accelerate your release cycles** by removing implementation bottlenecks  \n* **Focus your expertise** on reviewing and optimizing, rather than writing boilerplate code\n\nReady to see GitLab Duo with Amazon Q in action? 
Watch our demo video to discover how you can transform your development workflow today.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/jxxzNst3jpo?si=j_LQdZhUnwqoQEst\" title=\"GitLab Duo with Amazon Q demo video for dev workflow\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n> To learn more about GitLab Duo with Amazon Q visit us at an upcoming [AWS Summit in a city near you](https://about.gitlab.com/events/aws-summits/) or [reach out to your GitLab representative](https://about.gitlab.com/partners/technology-partners/aws/#form).\n\n## GitLab Duo with Amazon Q resources\n\n- [GitLab Duo with Amazon Q: Agentic AI optimized for AWS generally available](https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws/)\n- [GitLab and AWS partner page](https://about.gitlab.com/partners/technology-partners/aws/)\n- [GitLab Duo with Amazon Q documentation](https://docs.gitlab.com/user/duo_amazon_q/)",[803,805,719,9,717,233],{"slug":2031,"featured":91,"template":698},"gitlab-duo-amazon-q-transform-ideas-into-code-in-minutes","content:en-us:blog:gitlab-duo-amazon-q-transform-ideas-into-code-in-minutes.yml","Gitlab Duo Amazon Q Transform Ideas Into Code In Minutes","en-us/blog/gitlab-duo-amazon-q-transform-ideas-into-code-in-minutes.yml","en-us/blog/gitlab-duo-amazon-q-transform-ideas-into-code-in-minutes",{"_path":2037,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2038,"content":2044,"config":2049,"_id":2051,"_type":14,"title":2052,"_source":16,"_file":2053,"_stem":2054,"_extension":19},"/en-us/blog/gitlab-duo-chat-101-get-more-done-on-gitlab-with-our-ai-assistant",{"title":2039,"description":2040,"ogTitle":2039,"ogDescription":2040,"noIndex":6,"ogImage":2041,"ogUrl":2042,"ogSiteName":685,"ogType":686,"canonicalUrls":2042,"schema":2043},"GitLab Duo Chat 101: Get more done on GitLab with our AI assistant","In this 
first article in our series learn how Chat can improve developer productivity – for example, by summarizing issues – and how to improve prompts to get better answers faster.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099126/Blog/Hero%20Images/Blog/Hero%20Images/GitLab_Duo_Blog_Hero_1800x945_r2_B%20%281%29_6a2UB7TOQk3JKxyb5yqYtc_1750099126039.png","https://about.gitlab.com/blog/gitlab-duo-chat-101-get-more-done-on-gitlab-with-our-ai-assistant","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab Duo Chat 101: Get more done on GitLab with our AI assistant\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Abubakar Siddiq Ango\"}],\n        \"datePublished\": \"2024-05-29\",\n      }",{"title":2039,"description":2040,"authors":2045,"heroImage":2041,"date":2046,"body":2047,"category":801,"tags":2048},[1538],"2024-05-29","GitLab Duo Chat became [generally available](https://about.gitlab.com/blog/gitlab-duo-chat-now-generally-available/) in [GitLab 16.11](https://about.gitlab.com/releases/2024/04/18/gitlab-16-11-released/) and its power as a personal assistant can not be overstated. On a DevSecOps platform, more has to happen than just generating code; planning, discussions, security, compliance, and technical reviews are all critical to developing secure software faster. Issues, epics, merge requests, and other sections of GitLab are where this work happens, with knowledge often buried deep in comment threads. It can take a lot of time to get up to speed on these threads, especially when they've grown to hundreds of comments and interactions and when you've been away from them for a while. This is where GitLab Duo Chat can help.\n\nIn this first part of our GitLab Duo Chat 101 series, we'll introduce you to Chat's capabilities and then dig into how to use Chat to summarize comment threads.\n\n> Live demo! 
Discover the future of AI-driven software development with our GitLab 17 virtual launch event. [Register today](https://about.gitlab.com/seventeen/)!\n\n## GitLab Duo Chat's capabilities\n\nWith Chat, you can refactor [existing code](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html#refactor-code-in-the-ide), learn how a [block of code works](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html#explain-code-in-the-ide), and write [tests](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html#write-tests-in-the-ide) for your code, learn about your issues and epics, and much more. Depending on your prompts, you can make Chat do impressive things that boost developer productivity. In the video below, I showcased how you can use GitLab Duo Chat to interact with GitLab and learn about your issues and epics.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/RJezT5_V6dI?si=XlXGs2DHAYa8Awzs\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## Use cases   \n\nGitLab Duo Chat’s capabilities allow for productivity gains across multiple parts of the software development lifecycle:\n\n- Product and project managers can use Chat’s issues and epics capabilities to gain insights into discussions and plan faster.\n- Developers can create solutions faster with code suggestions and refactoring capabilities. 
When it comes to working with legacy code or code from other team members, less time is spent on research with the ` /explain` capability providing the necessary insights to understand the code.\n- Quality assurance and test engineers can generate tests and check for vulnerabilities\n- New employees can get a better understanding of their code base and get started solving problems.\n- Beginner programmers can understand and pick up a language or framework quickly and create solutions with Chat providing next steps and insights.\n\n> Check out \"[10 best practices for using GitLab Duo Chat](https://about.gitlab.com/blog/10-best-practices-for-using-ai-powered-gitlab-duo-chat/)\" for tips and tricks to craft AI prompts. \n\n## Summarizing issues\n\nWhen you encounter an issue, especially one with a lot of comments, you skim through the issue description, along with a couple of comments, but can't always get the complete picture of the conversations. GitLab Duo Chat can get you up to speed fast. In the image below, I asked Chat to summarize an issue along with a follow-up question. In two prompts, I got what I needed to understand what is going on in the issue without spending hours reading through the comments.\n\n![Chat summarizing an issue](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099137/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750099137154.png)\n\nYou can use GitLab Duo Chat on the GitLab interface, as well as [the WebIDE, Visual Studio Code, and JetBrains interfaces](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html#use-gitlab-duo-chat-in-the-web-ide).\n\n## Prompts and context\n\nGetting the best responses from any AI tool requires carefully crafting the questions asked. Sometimes, you need to give examples of the responses you expect to prime the large language models (LLMs) toward a desired response. 
Here are some areas to focus on to get desired responses.\n\n### Context\n\nHere are three prompts with similar objectives but worded differently:\n\n| Prompt 1     | Prompt 2     | Prompt 3    |\n| ---------- | ---------- | ---------- |\n| ![Prompt: Can you summarize this issue's description?](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099137/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750099137154.png)       | ![Prompt: Can you provide a high-level summary of this issue?](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099137/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750099137155.png)      | ![Prompt: Why is this issue popular?](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099137/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750099137156.png)      |\n\nThe context, “this issue,” is common among the three prompts; this tells Chat what resource to use in looking for answers. Prompt 1 gives additional context on what to focus on: the description of the issue. Prompt 2 is not limited in its scope, which means the LLMs will spend more time going through the description and all the comments to provide a more detailed summary of the whole issue. (Note: As of the publication of this blog, there were more than 90 comments in that issue.) Prompt 3 got a poorer response because not much expectation was set for the type of response expected. \n\n[Low-context communication](https://handbook.gitlab.com/handbook/company/culture/all-remote/effective-communication/#understanding-low-context-communication) is critical in crafting your prompt for the best responses, as all information needed for the LLMs to provide an informed response is provided.\n\n### Simplicity\n\nThe wordiness of prompts can sometimes lead to incorrect or no responses. In the image below, you can see that rephrasing a prompt from “Customers have mentioned why this issue is important to them. 
Can you list the top 3 reasons they mentioned?” to “Why is this issue important to customers?” led to the expected response. When you don’t get the response you desire, simplifying or changing the words used in your prompt can improve the quality of responses.\n\n![Wordy Chat prompts](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099137/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750099137158.png)\n\n### Follow-up questions\n\nGitLab Duo Chat can have follow-up conversations – an essential capability. In the image below, I continued asking how the issue in question can be solved in GitLab's code along with a follow-up question asking for code samples.\n\n![Streamlined Chat prompt shown](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099137/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750099137158.png)\n\nFollow-up questions allow the application to maintain context and provide faster responses. A recommendation is to provide parts of Chat’s previous responses in the next prompt. In the example above, I mentioned “Rails App,” as previously suggested. \n\n## Get started with GitLab Duo Chat \n\nGitLab Duo Chat does more than help you write better code, it helps you navigate through problems and quickly find solutions. With the right prompts and context, you can build secure software faster.\n\n> Want to try GitLab Duo Chat? 
[Start your free trial](https://about.gitlab.com/gitlab-duo/#free-trial) today.",[803,9,951,719],{"slug":2050,"featured":91,"template":698},"gitlab-duo-chat-101-get-more-done-on-gitlab-with-our-ai-assistant","content:en-us:blog:gitlab-duo-chat-101-get-more-done-on-gitlab-with-our-ai-assistant.yml","Gitlab Duo Chat 101 Get More Done On Gitlab With Our Ai Assistant","en-us/blog/gitlab-duo-chat-101-get-more-done-on-gitlab-with-our-ai-assistant.yml","en-us/blog/gitlab-duo-chat-101-get-more-done-on-gitlab-with-our-ai-assistant",{"_path":2056,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2057,"content":2063,"config":2068,"_id":2070,"_type":14,"title":2071,"_source":16,"_file":2072,"_stem":2073,"_extension":19},"/en-us/blog/gitlab-duo-chat-gets-agentic-ai-makeover",{"title":2058,"description":2059,"ogTitle":2058,"ogDescription":2059,"noIndex":6,"ogImage":2060,"ogUrl":2061,"ogSiteName":685,"ogType":686,"canonicalUrls":2061,"schema":2062},"GitLab Duo Chat gets agentic AI makeover  ","Our new Duo Chat experience, currently an experimental release, helps developers onboard to projects, understand assignments, implement changes, and more.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099203/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%2820%29_2bJGC5ZP3WheoqzlLT05C5_1750099203484.png","https://about.gitlab.com/blog/gitlab-duo-chat-gets-agentic-ai-makeover","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab Duo Chat gets agentic AI makeover  \",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Torsten Linz\"}],\n        \"datePublished\": \"2025-05-29\",\n      }",{"title":2058,"description":2059,"authors":2064,"heroImage":2060,"date":2065,"body":2066,"category":801,"tags":2067},[1234],"2025-05-29","Generative AI chat assistants have become standard in software development,\nhelping create and fix code just to start. 
But what if your chat assistant\ncould understand the artifacts of your entire development process, not just\nyour code? What if that chat assistant could help you work through issues\nand project documentation before it helps you write code, and could access\nCI/CD pipelines and merge requests to help you finish coding tasks\nproperly? \n\n\n**Meet the next generation of GitLab Duo Chat – GitLab Duo Agentic Chat, a\nsignificant evolution in AI-native development assistance and the newest\naddition to our platform, now in [experimental\nrelease](https://docs.gitlab.com/policy/development_stages_support/#experiment).**\nGitLab Duo Agentic Chat is currently available as an experimental feature in\nVS Code to all users on GitLab.com that have any one of these add-ons:\nDuo Core, Duo Pro, or Duo Enterprise.\n\n\nAgentic Chat transforms chat from traditional conversational AI to a chat\nexperience that takes action on your behalf, breaking down complex problems\ninto discrete tasks that it can complete. Instead of simply responding to\nquestions with the context you provide, Agentic Chat can:\n\n\n* **Autonomously determine** what information it needs to answer your\nquestions  \n\n* **Execute a sequence of operations** to gather that information from\nmultiple sources  \n\n* **Formulate comprehensive responses** by combining insights from across\nyour project  \n\n* **Create and modify files** to help you implement solutions\n\n\nAnd all of this is done while keeping the human developer within the loop.\n\n\nAgentic Chat is built on the Duo Workflow architecture, which is [currently\nin private\nbeta](https://about.gitlab.com/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai/).\nThe architecture comprises agents and tools that take on specific tasks like\nfinding the right context for a given question or editing files. 
\n\n\n**Use cases for GitLab Duo Agentic Chat**\n\n\nHere are some real-world and common use cases for Agentic Chat:\n\n\n* Onboard to new projects faster by having AI help you familiarize yourself\nwith a new codebase.\n\n\n* Jump into assigned work immediately, even when issue descriptions are\nunclear, because Agentic Chat can help you connect the dots between\nrequirements and existing implementations.\n\n\n* When it's time to make changes, Agentic Chat can handle the implementation\nwork by creating and editing multiple files across your project.\n\n\n* At release time, Agentic Chat can help you verify that your solution\nactually addresses the original requirements by analyzing your merge\nrequests against the initial issue or task.\n\n\n![agentic chat -\nexample](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099210/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750099210429.png)\n\n\n\u003Ccenter>\u003Ci>Agentic Chat making code edits\u003C/i>\u003C/center>\n\n\n## From learning to shipping: A complete workflow demonstration in four\nsteps\n\n\nTo show how Agentic Chat transforms the development experience, let's walk\nthrough a real scenario from our engineering teams. Imagine you're a new\nteam member who's been assigned an issue but knows nothing about the\ncodebase. You can follow along with this video demonstration:\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/uG9-QLAJrrg?si=kaOhYylMIaWkIuG8j\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n**Step 1: Understand the project**\n\n\nInstead of manually exploring files and documentation, you can prompt\nAgentic Chat:\n\n\n```unset\n\nI am new to this project. 
Could you read the project structure and explain\nit to me?\n\n```\n\n\nAgentic Chat provides a comprehensive project overview by:  \n\n- Exploring the directory structure  \n\n- Reading README files and documentation  \n\n- Identifying key components and applications\n\n\n**Step 2: Understand your assigned task**\n\n\nNext, you need to understand your specific assignment, so you can enter this\nprompt:\n\n\n```unset\n\nI have been assigned Issue 1119. Could you help me understand this task,\nspecifically where do I need to apply the refactoring?\n\n```\n\n\nAgentic Chat explains the task and proposes a refactoring approach by:\n\n- Retrieving and analyzing the issue details from the remote GitLab server  \n\n- Examining relevant project files  \n\n- Identifying the specific locations requiring changes\n\n\n**Step 3: Implement the solution**\n\n\nRather than doing the work manually, you can request:\n\n\n```unset\n\nCould you make the edits for me? Please start with steps one, two, three.\n\n```\n\n\nAgentic Chat then:  \n\n- Creates new directories and files as needed \n\n- Extracts and refactors code across multiple locations  \n\n- Ensures consistency across all modified files  \n\n- Provides a summary of all changes made\n\n\n**Step 4: Verify completion**\n\n\nFinally, after creating your merge request, you can verify your work:\n\n\n```unset\n\nDoes my MR fully address Issue 1119? \n\n```\n\n\nAgentic Chat confirms whether all requirements have been met by analyzing\nboth your merge request and the original issue.\n\n\n## Try it today and share your feedback\n\n\nGitLab Duo Agentic Chat is currently available as an experimental feature in VS Code to all users on GitLab.com that have any one of these add-ons: Duo Core, Duo Pro, or Duo Enterprise. 
See our [setup\ndocumentation](https://docs.gitlab.com/user/gitlab_duo_chat/agentic_chat/)\nfor prerequisites and configuration steps.\n\n\nAs an experimental feature, Agentic Chat has some known limitations we're\nactively addressing, including slower response times due to multiple API\ncalls, keyword-based rather than semantic search, and limited support for\nnew local folders or non-GitLab projects. **Your feedback is crucial in\nhelping us prioritize improvements and bring Agentic Chat to general\navailability so please share your experience in [this\nissue](https://gitlab.com/gitlab-org/gitlab/-/issues/542198).**\n\n\n## What's next?\n\n\nWe are fully focused on improving Agentic Chat, including bringing it to\ngeneral availability. In the meantime, we are aiming to improve response\ntimes and are adding capabilities that GitLab Duo Chat currently has, such\nas using self-hosted models or supporting JetBrains and Visual Studio in\naddition to VS Code. Once we have switched Duo Chat to this new architecture\nwe plan to also bring Agentic Chat to the chat in the GitLab web\napplication. We also plan to add a lot more functionality, such as editing\nGitLab artifacts, supporting context from custom Model Context Protocol, or\nMCP, servers, and offering commands to run in the terminal.\n\n\n> Ready to experience autonomous development assistance but not yet a GitLab\ncustomer? Try Agentic Chat today as part of [a free trial of GitLab\nUltimate with Duo Enterprise](https://about.gitlab.com/free-trial/) and help\nshape the future of AI-powered development. Follow these [setup steps for VS\nCode](https://docs.gitlab.com/user/gitlab_duo_chat/agentic_chat/#use-agentic-chat-in-vs-code).\n\n>\n\n> And make sure to join the GitLab 18 virtual launch event to learn about\nour agentic AI plans and more. [Register\ntoday!](https://about.gitlab.com/eighteen/)\n\n\n***Disclaimer: This blog contains information related to upcoming products,\nfeatures, and functionality. 
It is important to note that the information in\nthis blog post is for informational purposes only. Please do not rely on\nthis information for purchasing or planning purposes. As with all projects,\nthe items mentioned in this blog and linked pages are subject to change or\ndelay. The development, release, and timing of any products, features, or\nfunctionality remain at the sole discretion of GitLab.***\n\n\n## Learn more\n\n\n- [GitLab Duo Workflow: Enterprise visibility and control for agentic\nAI](https://about.gitlab.com/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai/)\n\n- [What is agentic AI?](https://about.gitlab.com/topics/agentic-ai/)\n\n- [Agentic AI guides and\nresources](https://about.gitlab.com/blog/agentic-ai-guides-and-resources/)\n",[803,1133,695,9,717,719],{"slug":2069,"featured":91,"template":698},"gitlab-duo-chat-gets-agentic-ai-makeover","content:en-us:blog:gitlab-duo-chat-gets-agentic-ai-makeover.yml","Gitlab Duo Chat Gets Agentic Ai Makeover","en-us/blog/gitlab-duo-chat-gets-agentic-ai-makeover.yml","en-us/blog/gitlab-duo-chat-gets-agentic-ai-makeover",{"_path":2075,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2076,"content":2082,"config":2087,"_id":2089,"_type":14,"title":2090,"_source":16,"_file":2091,"_stem":2092,"_extension":19},"/en-us/blog/gitlab-duo-chat-now-generally-available",{"title":2077,"description":2078,"ogTitle":2077,"ogDescription":2078,"noIndex":6,"ogImage":2079,"ogUrl":2080,"ogSiteName":685,"ogType":686,"canonicalUrls":2080,"schema":2081},"GitLab Duo Chat, your at-the-ready AI assistant, is now generally available","Explore a real-world example of how our versatile conversational interface for GitLab Duo helps developers onboard faster and write secure code more efficiently.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749671806/Blog/Hero%20Images/GitLab-Duo-Chat-Thumbnail.png","https://about.gitlab.com/blog/gitlab-duo-chat-now-generally-available","\n                      
  {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab Duo Chat, your at-the-ready AI assistant, is now generally available\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Torsten Linz\"},{\"@type\":\"Person\",\"name\":\"Laurena Alves\"}],\n        \"datePublished\": \"2024-04-18\",\n      }",{"title":2077,"description":2078,"authors":2083,"heroImage":2079,"date":2084,"body":2085,"category":801,"tags":2086},[1234,864],"2024-04-18","GitLab Duo Chat is now generally available in GitLab 16.11, bringing a broad range of AI features together into a single easy-to-use, natural language chat experience. Chat provides real-time guidance for engineering and non-engineering users across the entire software development lifecycle — supporting teams in a wide range of tasks, from understanding code faster and boosting collaboration to quickly learning how to use GitLab.\n\nToday, we are also making the following GitLab Duo capabilities, which are accessible in Chat, generally available:\n- **Code explanation** helps developers understand unfamiliar code.\n- **Code refactoring** enables developers to improve and modernize existing code.\n- **Test generation** automates the writing of tests for functions and methods and helps teams catch bugs sooner.\n\nGitLab Duo Chat is available within the GitLab user interface, including GitLab’s Web IDE, as well as within popular IDEs, including VS Code and the JetBrains suite of IDEs. 
Chat can also be extended by adding custom tools to help users more accurately complete tasks unique to their organization.\n\nGitLab's [2023 State of AI in Software Development Report](https://about.gitlab.com/developer-survey/#ai) found that developers spend 75% of their time on tasks other than writing code, including understanding and improving existing code, maintaining code, and testing — and AI, when leveraged across the entire software development lifecycle, will drive efficiencies across all these areas. The report also found that over three-quarters of DevSecOps professionals said they are concerned about AI tools having access to private information or intellectual property. By providing teams with a single entry point into AI for the entire software development process, Chat helps teams boost efficiency without sacrificing security, privacy, or code quality. \n\nIn this blog, we’ll explore an example of how you can use Chat to jump into a new project and immediately start adding value.\n\n## Get up to speed, fast\nAs a developer during onboarding, you can ask Chat for help with general knowledge such as understanding CI/CD, the difference between an issue and an epic, how to reset your GitLab password, or how to get started with popular frameworks. 
With Chat, you have an assistant ready to answer all of your onboarding questions, and soon you’re ready to dig into your first project.\n\n\u003Ccenter>\n\u003Cfigure>\n   \u003Cimg src=\"https://res.cloudinary.com/about-gitlab-com/image/upload/v1752175955/Blog/tottp2iwt85d7grkytpc.png\" alt=\"Chat GA blog CI/CD explanation\" width=\"300\">\n\u003C/figure>\n\u003C/center>\n\u003Cp>\u003C/p>\n\n## Accelerate development\nYour first task is to help the Product team update the product UI to include totals for the prices of all products in the inventory.\n\nTo better understand where to add the new functionality, you can ask Chat to explain a portion of the application and get a quick, easy-to-understand explanation of how the code works — without context-switching. Then, as you start developing the new feature, you can use GitLab Duo Code Suggestions to write code more efficiently. GitLab Duo lets you stay in flow by predictively completing code blocks and proposing common code, all in the same environment where you’re already coding.\n\nFinally, you spot a few lines of code that can be simplified, so you ask Chat to refactor the code. After just a few minutes, you have a better understanding of how your application works — and you have new, high-quality code to implement the functionality the Product team is looking for.\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/933806295?autoplay=1&loop=1&autopause=0&background=1&muted=1\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Help Center Video\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\u003Cp>\u003C/p>\n\n## Stay secure\nBefore you merge any of your new code, you’ll need to run some tests to ensure everything works as expected. 
You can ask Chat to create unit tests for a Java class you modified. You then paste the unit tests into a new file. You notice that the vulnerability scanner for infrastructure as code has not been enabled, so you ask Chat about it and quickly append the scanner to the project pipeline.\n\nNow you can merge your code with confidence.\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/933806352?autoplay=1&loop=1&autopause=0&background=1&muted=1\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Help Center Video\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\u003Cp>\u003C/p>\n\n> Explore tips and tricks for [integrating GitLab Duo Chat into your AI-powered DevSecOps workflows](https://about.gitlab.com/blog/10-best-practices-for-using-ai-powered-gitlab-duo-chat/).\n\n## Adopt AI with guardrails\nLuckily, while you’ve been busy onboarding, getting up to speed on your first project, and merging your first lines of code, your organization can rest assured that Chat is built with privacy in mind and doesn't use customer data to train AI models. In addition, GitLab Duo AI access controls enable organizations to control sensitive data at the project, sub-group, and group levels by enabling or disabling AI read access.\n\n> Visit the [GitLab AI Transparency Center](https://about.gitlab.com/ai-transparency-center/) to explore how GitLab implements governance and transparency in GitLab Duo.\n\n## Get started with GitLab Duo Chat today\nWhether you’re a developer or you’re managing the entire team, GitLab Duo Chat can empower you to take advantage of AI exactly where you need it throughout the software development lifecycle — all while helping you maintain code quality and security guardrails. 
GitLab Duo Pro, including code explanation, code refactoring, and test generation, is now available to GitLab Premium and Ultimate customers for $19 USD per user per month.\n\n![GitLab Duo Pro price card](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749677968/Blog/Content%20Images/GitLab_Pricing_Guide_PRO_820px_B.png)\n\n> [Start a free trial of GitLab Duo and get started using Chat today.](https://about.gitlab.com/gitlab-duo/#free-trial)\n\n## What’s next for GitLab Duo \nWe are building [GitLab Duo](https://about.gitlab.com/gitlab-duo/) to empower your teams to develop software faster, secure applications more efficiently, better manage software pipelines, and enhance team collaboration. We are excited to share that several GitLab Duo features currently in Beta will soon be released into general availability. Key enhancements include improved AI-powered security capabilities, such as detailed vulnerability explanations and automated resolution processes, as well as summarization and templating tools to help teams improve collaboration in issues, merge requests, and code reviews with concise AI-powered summaries and auto-population of templates.\n\nIn line with our privacy-first approach to AI, we will also continue to introduce new ways for organizations to tailor GitLab Duo to their specific needs with two upcoming capabilities:\n\n- **Model personalization** will enable organizations to leverage the full potential of AI in a way that aligns closely with their strategic goals, operational needs, and customer expectations.\n- **Self-hosted model deployment** will help organizations ensure that data doesn't leave their secure environment, reducing the risk of breaches and ensuring compliance with data protection regulations.\n\n> Visit the [GitLab Duo documentation](https://docs.gitlab.com/ee/user/ai_features.html) for a complete list of generally available, Beta, and Experimental 
features.",[803,717,9,1133],{"slug":2088,"featured":91,"template":698},"gitlab-duo-chat-now-generally-available","content:en-us:blog:gitlab-duo-chat-now-generally-available.yml","Gitlab Duo Chat Now Generally Available","en-us/blog/gitlab-duo-chat-now-generally-available.yml","en-us/blog/gitlab-duo-chat-now-generally-available",{"_path":2094,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2095,"content":2101,"config":2106,"_id":2108,"_type":14,"title":2109,"_source":16,"_file":2110,"_stem":2111,"_extension":19},"/en-us/blog/gitlab-duo-enterprise-is-now-available",{"title":2096,"description":2097,"ogTitle":2096,"ogDescription":2097,"noIndex":6,"ogImage":2098,"ogUrl":2099,"ogSiteName":685,"ogType":686,"canonicalUrls":2099,"schema":2100},"GitLab Duo Enterprise is now available","Organizations have an end-to-end AI partner for faster, more secure software development. Learn how GitLab Duo Enterprise supports the entire DevSecOps lifecycle.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749665660/Blog/Hero%20Images/Untitled__1800_x_945_px_.png","https://about.gitlab.com/blog/gitlab-duo-enterprise-is-now-available","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab Duo Enterprise is now available\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"David DeSanto, Chief Product Officer, GitLab\"}],\n        \"datePublished\": \"2024-09-03\",\n      }",{"title":2096,"description":2097,"authors":2102,"heroImage":2098,"date":2103,"body":2104,"category":801,"tags":2105},[1869],"2024-09-03","[GitLab Duo Enterprise]( https://about.gitlab.com/gitlab-duo/), now available, is an end-to-end AI partner designed for the entire software development lifecycle. 
This powerful suite of AI tools is designed to boost developer productivity, enhance security, streamline collaboration, and accelerate your DevSecOps processes.\n\nKey features at a glance:\n- Intelligent code assistance across 25+ programming languages\n- AI-powered security vulnerability details and resolution\n- Automated test generation and root cause analysis\n- Team collaboration enhancements with AI-driven summaries\n- ROI quantification through an AI Impact Dashboard\n\n## Why we developed GitLab Duo Enterprise\n\nAs organizations aim to deliver better software faster and create customer value, they encounter significant challenges that slow their progress. [Our research](http://about.gitlab.com/developer-survey/2024/ai) shows that 95% of organizations are either evaluating or using AI in the software development lifecycle. However, 55% of survey respondents said they feel using AI for software development is risky.\n\nCommon pain points in the enterprise include suboptimal developer experience and productivity, increasing security and compliance demands, inefficient collaboration across teams, and difficulty in assessing the ROI of AI technology investments. GitLab Duo Enterprise addresses these challenges head-on, providing a secure, efficient, and powerful AI partner for your development teams. 
\n\n**Let's explore how GitLab Duo Enterprise can transform the way your company creates and deploys software.**\n\n## Boost developer productivity with intelligent code assistance\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1004252678?h=83f35171b6&amp;badge=0&amp;badge=0&amp?autoplay=1&loop=1&autopause=0&background=1&muted=1\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Code Suggestions clip\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\u003Cp>\u003C/p>\n\nOne of the primary hurdles in software development is the time-consuming nature of routine coding tasks. Get to the most impactful work faster with:\n\n- __Code Suggestions__ supports more than 25 programming languages. This AI-powered tool accelerates code creation, improves code quality, and reduces the time spent on boilerplate tasks.\n\nBut it's not just about writing new code. \n\n- GitLab Duo Enterprise's __Code Explanation__ capability enables developers to quickly understand complex or unfamiliar code, while \n\n- **Code Refactoring** enables developers to [improve and modernize existing code](https://about.gitlab.com/blog/refactor-code-into-modern-languages-with-ai-powered-gitlab-duo/). \n\n- __Test Generation__ automates the creation of comprehensive unit tests. The result? Developers can focus on high-value tasks that drive innovation, leading to faster development cycles and improved software quality.\n\n> See how [European tech company Cube](https://about.gitlab.com/customers/cube/) uses Code Suggestions, Test Generation, and other GitLab Duo features to achieve improvements in speed and efficiency. 
\n\n## Enhance team collaboration and communication\n\nEffective collaboration is the cornerstone of successful software development, yet it's often hindered by lengthy discussions, complex merge requests, and time-consuming code reviews. GitLab Duo Enterprise addresses these challenges with its suite of summarization and templating tools:\n- __Discussion Summary:__ Allows team members to quickly get up to speed on lengthy conversations in issues\n- __Merge Request Summaries:__ Provide clear, concise overviews of proposed changes. \n- __Code Review Summaries:__ Streamline the review process, enabling better handoffs between authors and reviewers. \n\nBy facilitating clearer communication and faster decision-making, GitLab Duo Enterprise helps teams work more efficiently and deliver results more quickly.\n\n## Streamline troubleshooting and debugging\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1004252688?h=fc6c048bfd&amp;badge=0&amp;badge=0&amp?autoplay=1&loop=1&autopause=0&background=1&muted=1\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Root Cause Analysis clip\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\u003Cp>\u003C/p>\n\nWhen development pipelines fail, the impact on project timelines can be significant. GitLab Duo Enterprise's __Root Cause Analysis__ feature is a game-changer here. By automatically analyzing logs and providing detailed explanations of failures along with potential fixes, Root Cause Analysis significantly reduces the time spent on troubleshooting.\n\nThe benefits extend beyond just time savings. 
With [faster resolution of CI/CD build issues](https://about.gitlab.com/blog/developing-gitlab-duo-blending-ai-and-root-cause-analysis-to-fix-ci-cd/), teams can maintain momentum, reduce downtime, and ultimately deliver software updates more frequently and reliably.\n\n## Elevate security across the development lifecycle\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1004252706?h=73e568b89c&amp;badge=0&amp;badge=0&amp?autoplay=1&loop=1&autopause=0&background=1&muted=1\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Vulnerability Explanation and Resolution clip\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\u003Cp>\u003C/p>\n\nCybersecurity threats are ever-present, so robust application security is a necessity. GitLab Duo Enterprise rises to this challenge with its __Vulnerability Explanation__ and __Vulnerability Resolution__ features. These AI-powered tools help [developers fully understand security vulnerabilities](https://about.gitlab.com/blog/developing-gitlab-duo-use-ai-to-remediate-security-vulnerabilities/) and then automatically generate merge requests with suggested fixes.\n\n## Quantify AI impact for strategic decision-making\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1004252663?h=d35106288b&amp;badge=0&amp?autoplay=1&loop=1&autopause=0&background=1&muted=1\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"AI Impact Dashboard clip\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\u003Cp>\u003C/p>\n\nDemonstrating the ROI of technology investments is crucial. 
GitLab Duo Enterprise addresses this need head-on with its __AI Impact Dashboard__. This analytics tool, built on top of Value Stream Analytics and DORA4 metrics, provides clear metrics on cycle time improvements and increased deployment frequencies, allowing organizations to quantify the tangible benefits of AI adoption in their development processes.\n\nBy offering insights into how AI usage correlates with key productivity metrics, the [AI Impact Dashboard](https://about.gitlab.com/blog/developing-gitlab-duo-ai-impact-analytics-dashboard-measures-the-roi-of-ai/) empowers leadership to make data-driven decisions about resource allocation and strategic technology investments.\n\n## Embrace the future of AI-powered DevSecOps\n\nAs we unveil GitLab Duo Enterprise, we're proud to announce that GitLab has been recognized as a Leader in the inaugural [Gartner® Magic Quadrant™ for AI Code Assistants](https://about.gitlab.com/gartner-mq-ai-code-assistants/). This recognition underscores our commitment to delivering AI solutions that drive real business value.\n\nThe future of software development is here, and it's powered by AI. 
We're here to help you incorporate intelligent, scalable AI throughout the DevSecOps lifecycle so you can deliver results faster for your customers.\n\n> [Get started today with GitLab Duo Enterprise with a free  trial!](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/?type=free-trial&toggle=gitlab-duo-pro)\n",[803,717,9,695,1133],{"slug":2107,"featured":91,"template":698},"gitlab-duo-enterprise-is-now-available","content:en-us:blog:gitlab-duo-enterprise-is-now-available.yml","Gitlab Duo Enterprise Is Now Available","en-us/blog/gitlab-duo-enterprise-is-now-available.yml","en-us/blog/gitlab-duo-enterprise-is-now-available",{"_path":2113,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2114,"content":2119,"config":2126,"_id":2128,"_type":14,"title":2129,"_source":16,"_file":2130,"_stem":2131,"_extension":19},"/en-us/blog/gitlab-duo-pro",{"title":2115,"description":2116,"ogTitle":2115,"ogDescription":2116,"noIndex":6,"ogImage":859,"ogUrl":2117,"ogSiteName":685,"ogType":686,"canonicalUrls":2117,"schema":2118}," GitLab Duo Pro combines Code Suggestions, Chat, and organizational controls","AI-powered code completion, code generation, chat, and per-user assignment come to the GitLab DevSecOps Platform as a secure add-on.","https://about.gitlab.com/blog/gitlab-duo-pro","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \" GitLab Duo Pro combines Code Suggestions, Chat, and organizational controls\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Hillary Benson\"},{\"@type\":\"Person\",\"name\":\"Dave Steer\"}],\n        \"datePublished\": \"2024-01-17\",\n      }",{"title":2115,"description":2116,"authors":2120,"heroImage":859,"date":2123,"body":2124,"category":801,"tags":2125},[2121,2122],"Hillary Benson","Dave Steer","2024-01-17","> __This blog has been updated to include GitLab Duo Chat (in Beta) as part of the GitLab Duo Pro add-on.__\n\nLast year, we launched GitLab 
Duo Code Suggestions into general availability as the cornerstone of our GitLab Duo suite of AI capabilities that power DevSecOps workflows. We received incredibly positive feedback from our customers on the ease of use and effectiveness of Code Suggestions' code completion and code generation capabilities. \n\nNow, we are introducing [GitLab Duo Pro](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/), a new package that brings together Code Suggestions, Chat, and organizational control capabilities, ensuring that teams can take advantage of AI exactly where they need it throughout the software development lifecycle.\n\n> [Get started with GitLab Duo Pro](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/) \n\nGitLab Duo Pro, which is available to Ultimate and Premium customers as an add-on, boosts developer efficiency and effectiveness by decreasing the time required to write and understand code. With GitLab Duo Pro, organizations can set up their DevSecOps teams for success by giving them the AI-assisted tools they need to develop secure code faster, improve collaboration, and reduce the security and compliance risks of AI adoption.\n\nGitLab Duo Pro features:\n- Code Suggestions: AI-powered code completion and code generation\n- Chat (Beta): Provides real-time guidance on coding, refactoring, and test generation\n- Privacy-first approach: Your code stays your code — GitLab does not use it for training or fine-tuning AI models\n- Organizational controls: GitLab Duo Pro enables organizations to have greater control over AI by limiting AI usage to approved users only\n\nGitLab Duo Pro is available across SaaS, self-managed, and Dedicated deployments.\n\n## What is GitLab Duo Pro?\n\nWith GitLab Duo Pro, developers can access [Code Suggestions](https://about.gitlab.com/solutions/code-suggestions/) to generate blocks of code from single- and multi-line comments as well as comment blocks. 
Code Suggestions also autocompletes lines of code from a few typed characters — improving cycle times by securely taking care of repetitive, routine coding tasks.\n\nGitLab Duo Pro also includes [Chat](https://about.gitlab.com/blog/gitlab-duo-chat-beta/) (Beta), which is helpful for technical and non-technical users across the entire software development lifecycle. Chat assists in explaining unfamiliar code, suggesting and generating tests, and simplifying code. You can also use Chat to write code from scratch interactively. Whether you are onboarding to GitLab or you are already an expert, learning how to use GitLab is streamlined with Chat.\n\nOrganizational controls are provided in GitLab Duo Pro for better management of AI capabilities, including per-user assignment, so only approved users can use AI.\n\nGitLab Duo Pro is built with privacy as a critical foundation. Private, non-public customer code stored in GitLab is not used for training or fine-tuning AI models. Learn about [data usage when using GitLab Duo Pro Code Suggestions](https://docs.gitlab.com/ee/user/project/repository/code_suggestions/index.html#code-suggestions-data-usage).\n\n## Get GitLab Duo Pro today\n\nGitLab Duo Pro is currently available at a special introductory price of $9 USD per user/month to Ultimate and Premium customers. Beginning on February 1, 2024, GitLab Duo Pro will be available for $19 USD per user/month. 
[Contact us today](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/) to get started with GitLab Duo Pro.\n",[803,9,951],{"slug":2127,"featured":91,"template":698},"gitlab-duo-pro","content:en-us:blog:gitlab-duo-pro.yml","Gitlab Duo Pro","en-us/blog/gitlab-duo-pro.yml","en-us/blog/gitlab-duo-pro",{"_path":2133,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2134,"content":2140,"config":2145,"_id":2147,"_type":14,"title":2148,"_source":16,"_file":2149,"_stem":2150,"_extension":19},"/en-us/blog/gitlab-duo-self-hosted-enterprise-ai-built-for-data-privacy",{"title":2135,"description":2136,"ogTitle":2135,"ogDescription":2136,"noIndex":6,"ogImage":2137,"ogUrl":2138,"ogSiteName":685,"ogType":686,"canonicalUrls":2138,"schema":2139},"GitLab Duo Self-Hosted: Enterprise AI built for data privacy","Customers in regulated industries can now deploy GitLab Duo on self-managed infrastructure, leveraging the power of generative AI while helping to address data residency and privacy concerns.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097840/Blog/Hero%20Images/Blog/Hero%20Images/Self-Hosted%201800x945_1dL1II2ITh2PteObA9DBLD_1750097839679.png","https://about.gitlab.com/blog/gitlab-duo-self-hosted-enterprise-ai-built-for-data-privacy","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab Duo Self-Hosted: Enterprise AI built for data privacy\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Susie Bitters\"},{\"@type\":\"Person\",\"name\":\"Aathira Nair\"}],\n        \"datePublished\": \"2025-02-27\",\n      }",{"title":2135,"description":2136,"authors":2141,"heroImage":2137,"date":1316,"body":2143,"category":801,"tags":2144},[1419,2142],"Aathira Nair","We are excited to announce the general availability of GitLab Duo Self-Hosted for Code Suggestions and Chat. 
An optional capability for self-managed customers with a GitLab Duo Enterprise subscription, GitLab Duo Self-Hosted supports deployment flexibility across multiple platforms, including on-premises infrastructure or in private clouds and secure cloud environments through AWS Bedrock and Azure OpenAI. GitLab Duo Self-Hosted empowers teams to innovate with AI while helping them maintain control over sensitive data and intellectual property. \n\nSecurity concerns have been a major barrier to AI adoption in regulated industries. In our [Global DevSecOps Survey](http://about.gitlab.com/developer-survey/2024/ai), more than half of the respondents said that introducing AI into the software development lifecycle is risky. With [GitLab Duo](https://about.gitlab.com/gitlab-duo/), we gave organizations a way to ship more secure software faster with AI throughout the entire software development lifecycle.\n\nGitLab Duo Self-Hosted expands the availability of GitLab Duo AI features to organizations with stringent data privacy requirements, offering flexibility in both AI large language model (LLM) selection and deployment options. The earliest adopters of GitLab Duo Self-Hosted include organizations in the public sector and regulated industries  – e.g., financial services, automotive, and healthcare. These organizations seek to gain the competitive advantage of AI by integrating AI-powered development tools into their environments, while also giving security teams the control they need.\n\nAs one U.S. government agency says: “After selecting GitLab as the cornerstone of our agency-wide DevSecOps platform, we chose GitLab Duo Self-Hosted to further advance our software factory capabilities. GitLab Duo’s ability to operate in air-gapped environments and provide granular control over our data was crucial to delivering secure AI-powered features. 
This unified approach streamlines our workflow and strengthens security, allowing us to leverage AI for increased productivity while meeting strict compliance requirements.” \n\n![GitLab Duo Self-Hosted models](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097848/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750097848329.png)\n\n## Architect secure AI deployments\n\nGitLab Duo Self-Hosted enables GitLab Duo features that leverage a curated selection of leading AI LLMs, including those from Anthropic, Mistral, and OpenAI. Here are the LLMs supported by GitLab today:\n\n* On-premises - Mistral models with the vLLM serving platform  \n* AWS - Mistral and Anthropic Claude 3.5 Sonnet via AWS Bedrock  \n* Microsoft Azure - OpenAI GPT models via Azure AI\n\nWe are evaluating more models to support in the near future. [Learn more about the LLMs we support.](https://docs.gitlab.com/ee/administration/self_hosted_models/supported_models_and_hardware_requirements.html#approved-llms)\n\nGitLab Duo Self-Hosted deployment options include on-premises installations powered by the open-source vLLM framework, as well as private-cloud deployments via services like AWS Bedrock and Microsoft Azure AI. This flexibility helps organizations to architect AI solutions that align with their unique security, compliance, and performance requirements.\n\n## Simplify AI/ML implementation\n\nGitLab Duo's AI abstraction layer standardizes and simplifies the integration of the chosen LLM to a feature, mitigating the burden of implementing AI/ML technologies. 
This enables companies to streamline their AI adoption efforts and enhance the developer experience, free from the complexities of integrating and maintaining multiple tools.\n\n![GitLab Duo Self-Hosted AI-powered features](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097848/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097848330.png)\n\n## Maintain control of sensitive data\n\nBy isolating your GitLab instance, AI gateway, and LLMs in your own environment or country of choice, GitLab Duo Self-Hosted makes it possible that sensitive data and intellectual property remain within your designated perimeter. Granular control over data locality helps enable adherence to strict data residency regulations, while adopting AI capabilities in secure settings. Whether you use GitLab Duo Self-Hosted in a completely air-gapped environment with vLLM or leverage a supported private cloud, you can control all aspects of the deployment to include the geographic location of components. 
By eliminating the reliance on external APIs and providing full visibility into all request and response logs, GitLab Duo Self-Hosted helps even the most regulated organizations confidently adopt AI capabilities and meet the most stringent compliance obligations.\n\n**Start an interactive tour of GitLab Self-Hosted by clicking on the image below:**\n\n[![GitLab Duo Self-Hosted tour screenshot](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097848/Blog/Content%20Images/Blog/Content%20Images/Screenshot_2025-02-20_at_7.00.34_AM_aHR0cHM6_1750097848332.png)](https://gitlab.navattic.com/gitlab-duo-self-hosted)\n\n## Get started with GitLab Duo Self-Hosted today\n\nIf you're ready to advance your AI journey while addressing security and data privacy, [reach out to us](https://about.gitlab.com/sales/) to help set up GitLab Duo Self-Hosted in your environment today.",[803,695,9,717,1133],{"slug":2146,"featured":91,"template":698},"gitlab-duo-self-hosted-enterprise-ai-built-for-data-privacy","content:en-us:blog:gitlab-duo-self-hosted-enterprise-ai-built-for-data-privacy.yml","Gitlab Duo Self Hosted Enterprise Ai Built For Data Privacy","en-us/blog/gitlab-duo-self-hosted-enterprise-ai-built-for-data-privacy.yml","en-us/blog/gitlab-duo-self-hosted-enterprise-ai-built-for-data-privacy",{"_path":2152,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2153,"content":2158,"config":2163,"_id":2165,"_type":14,"title":2166,"_source":16,"_file":2167,"_stem":2168,"_extension":19},"/en-us/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws",{"title":2154,"description":2155,"ogTitle":2154,"ogDescription":2155,"noIndex":6,"ogImage":1454,"ogUrl":2156,"ogSiteName":685,"ogType":686,"canonicalUrls":2156,"schema":2157},"GitLab Duo with Amazon Q: Agentic AI optimized for AWS generally available","The comprehensive AI-powered DevSecOps platform combined with the deepest set of cloud computing capabilities speeds dev cycles, increases automation, and improves code 
quality.","https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab Duo with Amazon Q: Agentic AI optimized for AWS generally available\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Emilio Salvador\"}],\n        \"datePublished\": \"2025-04-17\",\n      }",{"title":2154,"description":2155,"authors":2159,"heroImage":1454,"date":2160,"body":2161,"category":801,"tags":2162},[1131],"2025-04-17","Today, we're excited to announce the general availability of [GitLab Duo with Amazon Q](https://about.gitlab.com/partners/technology-partners/aws/), delivering agentic AI throughout the software development lifecycle for AWS customers. GitLab Duo with Amazon Q, based on GitLab Ultimate, includes many familiar features such as code completion, code explanation, code generation, chat, and vulnerability explanation and resolution – all of which are now powered by Amazon Q. It is available with a Self-Managed deployment model for customers on Amazon Web Services (AWS).\n\nWith Amazon Q's agents directly embedded into GitLab's DevSecOps platform, developers maintain their familiar development environment while gaining powerful AI capabilities. The result is a frictionless experience that helps accelerate development cycles, reduce manual effort, and enhance code quality.\n\n“Participating in the early access program for GitLab Duo with Amazon Q has given us a glimpse into its transformative potential for our development workflows,” said Osmar Alonso, DevOps Engineer, Volkswagen Digital Solutions. “Even in its early stages, we saw how the deeper integration with autonomous agents could streamline our process, from code commit to production. 
We're excited to see how this technology empowers our team to focus on innovation and accelerate our digital transformation.\"\n\n## Agentic AI comes to complex customer environments\n\nBy combining agentic AI with secure, reliable cloud infrastructure, GitLab and AWS bring built-in security, scale, and reliability to complex customer environments, enabling them to realize the following benefits:\n\n__Unified developer experience for streamlined development__\n\nDevelopers can interact with Amazon Q through the GitLab Duo Chat interface from their preferred IDE or the GitLab web interface. This eliminates the need for context switching in other tools and helps developers stay focused on the project that they’re working on.\n\n__One solution for the entire software development lifecycle__\n\nCode suggestions and optimizations leverage AWS-specific patterns and practices, while testing tools understand AWS service interactions and dependencies. A common data store across all stages provides essential context to AI agents, enabling complete visibility and traceability for relevant actions.\n\n__Secure development with enterprise-grade guardrails__\n\nEnd-to-end security and compliance are built directly into the development platform with guardrails that help reduce risk without impeding velocity. This secure software development approach enforces transparency and auditability through AI agents while seamlessly integrating with AWS security services and compliance frameworks.\n\n## How to start using GitLab Duo with Amazon Q\n\nHere are five initial use cases we’re targeting to help teams build secure software faster with agentic AI: \n\n1. **Feature development acceleration** - Create issue descriptions, generate implementation plans based on your existing codebase, and produce complete merge requests ready for review. This drives feature delivery acceleration while maintaining consistency with internal development standards.  \n2. 
**Legacy application modernization** - Analyze your legacy Java codebase, create a comprehensive upgrade plan, and generate a merge request with all necessary code changes. This unlocks faster Java upgrade time, while providing a clear audit trail of all code transformations. Support for .NET and other languages is planned for future releases.  \n3. **Quality assurance enhancement** - Analyze code and automatically create comprehensive unit tests that understand your application logic and AWS service interactions. This increases test coverage, reduces manual test writing effort, and helps ensure consistent test quality across applications.  \n4. **Code review optimization** - Provide inline feedback on code changes, suggesting improvements based on development standards, highlighting security and performance considerations. This enables reduced code review cycles and delivery of higher-quality code merges for deployment.  \n5. **Vulnerability remediation** - Explain detected vulnerabilities in clear, detailed terms and one-click remediation based on recommended code changes, helping to significantly reduce the time from detection to remediation.\n\nWatch GitLab Duo with Amazon Q in action:\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1075753390?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Technical Demo: GitLab Duo with Amazon Q\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n> #### Get the benefits of GitLab Duo with Amazon Q today\n> GitLab's unified, AI-powered DevSecOps platform with Amazon Q's advanced AI capabilities provides AWS customers with a solution that transforms how teams build and deploy software. 
To learn more about GitLab Duo with Amazon Q visit us at an upcoming [AWS Summit in a city near you](https://about.gitlab.com/events/aws-summits/) or [reach out to your GitLab representative](https://about.gitlab.com/partners/technology-partners/aws/#form).",[803,9,805,717,695,1133],{"slug":2164,"featured":91,"template":698},"gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws","content:en-us:blog:gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws.yml","Gitlab Duo With Amazon Q Agentic Ai Optimized For Aws","en-us/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws.yml","en-us/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws",{"_path":2170,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2171,"content":2176,"config":2181,"_id":2183,"_type":14,"title":2184,"_source":16,"_file":2185,"_stem":2186,"_extension":19},"/en-us/blog/gitlab-duo-with-amazon-q-devsecops-meets-agentic-ai",{"title":2172,"description":2173,"ogTitle":2172,"ogDescription":2173,"noIndex":6,"ogImage":1454,"ogUrl":2174,"ogSiteName":685,"ogType":686,"canonicalUrls":2174,"schema":2175},"GitLab Duo with Amazon Q: DevSecOps meets agentic AI","AI-powered DevSecOps enhanced with autonomous AI agents accelerates developer productivity, application modernization, and innovation.","https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-devsecops-meets-agentic-ai","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab Duo with Amazon Q: DevSecOps meets agentic AI\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Emilio Salvador\"}],\n        \"datePublished\": \"2024-12-03\",\n      }",{"title":2172,"description":2173,"authors":2177,"heroImage":1454,"date":2178,"body":2179,"category":801,"tags":2180},[1131],"2024-12-03","We're excited to announce GitLab Duo with Amazon Q, a joint offering that brings together GitLab's comprehensive AI-powered DevSecOps platform with Amazon Q's autonomous AI 
agents in a single, integrated solution.\n\nGitLab Duo with Amazon Q transforms software development by integrating powerful AI agents directly into your daily workflows. Instead of switching between multiple tools, developers can now accelerate key tasks — from feature development to code reviews — all from within GitLab's comprehensive DevSecOps platform. Amazon Q’s AI agents act as intelligent assistants, automating time-consuming tasks like generating code from requirements, creating unit tests, conducting code reviews, and modernizing Java applications. By handling these complex tasks, this joint offering helps teams focus on innovation, while maintaining security and quality standards.\n\nThis enterprise-class developer experience includes:\n* The GitLab unified platform with one single data store, which automates the building, testing, packaging, and deployment of secure code\n* GitLab Duo, enhanced with Amazon Q developer, which leverages GitLab project context to generate multi-file changes based on the task\n* Amazon Q AI agents integrated with GitLab Duo, updating issues and creating merge requests per task, with permission scoped to the project\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1033653810?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"GitLab Duo and Amazon Q\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n## Partnership innovation: GitLab and AWS\n\nGitLab Duo with Amazon Q is the result of close collaboration between GitLab and AWS engineering teams, combining our strengths to transform software development. 
This partnership unites GitLab's expertise in unified DevSecOps with AWS's leadership in cloud computing, creating an innovative solution that understands how developers work.\n\nBy integrating Amazon Q's autonomous agents with GitLab's comprehensive AI-powered platform, we've built more than a technical integration. We've created an experience that makes AI-powered development feel natural and upholds the security, compliance, and reliability that enterprises require.\n\nIndustry analysts recognize the significance of this integration in advancing AI-powered software development:\n\n***\"With this joint offering, GitLab and AWS are combining their strengths to make agentic AI a reality in software development,\" said Katie Norton, Research Manager at IDC. \"GitLab Duo with Amazon Q addresses strong use cases and critical challenges that empower customers to harness the full potential of AI.\"***\n\n***\"Both developers and the organizations they work for are increasingly interested in simplified and unified experiences,\" says Rachel Stephens, senior analyst at RedMonk. \"Especially in the era of AI – when security and privacy are paramount concerns – organizations want to both harness the power of cutting edge technology while also controlling risk and minimizing disjointed software tool chains. The partnership between GitLab Duo and Amazon Q seeks to give developers the tools they need within the context of an end-to-end DevSecOps experience.\"***\n\n## 4 key customer benefits \n\nGitLab Duo with Amazon Q pairs AI-powered DevSecOps with the deepest set of cloud computing capabilities. Together, they help development teams:\n\n### 1. Streamline feature development from idea to code \n\nDevelopment teams often spend hours translating requirements into code, leading to slower delivery and inconsistent implementation. 
You can now invoke the GitLab Duo with Amazon Q agent by utilizing a new quick action `/q dev`, which will convert an issue description directly into merge-ready code in minutes. The agent analyzes requirements, plans the implementation, and generates a complete merge request — all while adhering to your team's development standards. Teams can iterate rapidly using feedback in comments, significantly reducing the time from idea to production.\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1034050110?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Feature Dev with Rev\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n### 2. Modernize legacy code without the headache \n\nUpgrading Java applications traditionally requires weeks of careful planning, manual code changes, and extensive testing. By using quick action `/q transform`, you can change this by automating the entire Java modernization process. In minutes, not hours, the agent analyzes your Java 8 or 11 codebase, creates a comprehensive upgrade plan, and generates fully documented merge requests for Java 17 migration. Every change is tracked and traceable, giving teams confidence while improving application security and performance.\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1034050145?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"QCT\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n### 3. 
Accelerate code reviews without sacrificing quality \n\nCode reviews often create bottlenecks: Teams wait days for feedback yet must maintain consistent standards. With the `/q review` quick action, you get instant, intelligent feedback on code quality and security directly in merge requests. By automatically identifying potential issues and suggesting improvements based on your standards, teams can maintain high-quality code while dramatically reducing review cycles.\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1034050136?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Code Reviews\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n### 4. Automate testing to ship with confidence\n\nManual test creation is time-consuming and often leads to inconsistent coverage across teams. With the `/q test` quick action, you can automatically generate comprehensive unit tests that understand your application logic. The agent ensures thorough coverage of critical paths and edge cases, matching your existing testing patterns. 
This automation helps teams catch issues earlier and maintain consistent quality standards, saving valuable developer time.\n\n\u003Cdiv style=\"padding:54.37% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1034050181?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"Use GitLab Duo with Amazon Q to add tests\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n## Enterprise-grade security and guardrails included\n\nBuilt for enterprise scale and security, this offering combines GitLab's integrated security, compliance, and privacy with Amazon Q's AI agent, accelerating developer workflows to help organizations ship secure software faster.\n\nThe integration features:\n\n* Built-in guardrails that maintain development velocity  \n* Granular controls for AI-powered features at user, project, and group levels  \n* End-to-end security integration with existing workflows\n\nDevSecOps teams can securely scale the development environment with the world's most broadly adopted cloud.\n\n## What's next\n\nGitLab Duo with Amazon Q builds on our existing integration with [AWS announced in May 2024](https://press.aboutamazon.com/2024/4/aws-announces-general-availability-of-amazon-q-the-most-capable-generative-ai-powered-assistant-for-accelerating-software-development-and-leveraging-companies-internal-data), representing a significant step forward in our joint mission to transform software development. This deeper integration of AI capabilities marks the beginning of our expanded collaboration with AWS. 
As we continue to evolve these capabilities, we'll focus on:\n\n* Extending AI features across the development lifecycle  \n* Enhancing developer productivity  \n* Meeting enterprise development demands at scale\n\n**GitLab Duo with Amazon Q is available today on a [public branch](https://gitlab.com/groups/gitlab-org/-/epics/16059) in the GitLab.org project. To get access to a preview and learn more about how it can transform your software development process, visit [our website](https://about.gitlab.com/partners/technology-partners/aws/#interest).**",[1133,805,803,9,233],{"slug":2182,"featured":91,"template":698},"gitlab-duo-with-amazon-q-devsecops-meets-agentic-ai","content:en-us:blog:gitlab-duo-with-amazon-q-devsecops-meets-agentic-ai.yml","Gitlab Duo With Amazon Q Devsecops Meets Agentic Ai","en-us/blog/gitlab-duo-with-amazon-q-devsecops-meets-agentic-ai.yml","en-us/blog/gitlab-duo-with-amazon-q-devsecops-meets-agentic-ai",{"_path":2188,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2189,"content":2195,"config":2201,"_id":2203,"_type":14,"title":2204,"_source":16,"_file":2205,"_stem":2206,"_extension":19},"/en-us/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai",{"title":2190,"description":2191,"ogTitle":2190,"ogDescription":2191,"noIndex":6,"ogImage":2192,"ogUrl":2193,"ogSiteName":685,"ogType":686,"canonicalUrls":2193,"schema":2194},"GitLab Duo Workflow: Enterprise visibility and control for agentic AI","Secure, autonomous, context-aware AI agents take on complex tasks, freeing developers to ship innovative software faster. 
Private beta waitlist now open.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749660174/Blog/Hero%20Images/Workflow_1800x945.png","https://about.gitlab.com/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab Duo Workflow: Enterprise visibility and control for agentic AI\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Pini Wietchner\"}],\n        \"datePublished\": \"2025-02-24\",\n      }",{"title":2190,"description":2191,"authors":2196,"heroImage":2192,"date":2198,"body":2199,"category":801,"tags":2200},[2197],"Pini Wietchner","2025-02-24","Today, we're excited to announce the opening of the waitlist for the [private beta of GitLab Duo Workflow](https://about.gitlab.com/gitlab-duo/agent-platform/): **agentic AI built on top of the most comprehensive DevSecOps platform.** The next step in our AI roadmap, GitLab Duo Workflow will help development teams navigate everything from project bootstrapping to deployment processes, from debugging issues to cross-team coordination, all within the IDE.\n\nGitLab Duo Workflow leverages the GitLab platform's structure for collaboration, continuous integration, continuous deployment, security, and compliance to help organizations as they accelerate their development process with AI agents.\n\nUse GitLab Duo Workflow to help you:\n* [bootstrap a new development project](#from-slow-project-setup-to-a-running-start)\n* [modernize code](#from-legacy-code-to-modern-applications)\n* [perform contextual tasks](#from-context-switching-to-flow-state)\n* [create documentation](#from-stale-docs-to-dynamic-knowledge)\n* [enhance test coverage](#from-patchy-to-comprehensive-testing)\n* and more\n\nThis is just the beginning. 
With GitLab’s unified data store, the more you use GitLab, the more context GitLab Duo Workflow has about your code, configurations, security findings, and deployment practices. The result: an increasingly powerful development experience that's tailored to your organization.\n\n## The promise and challenge of AI agents\n\nSoftware has fundamentally changed the world, but only a tiny fraction of the world's population has the skills to build software today. Yet, these developers reach billions of people with smartphones and internet connections. Just imagine a world where *more* people can build, secure, and deliver production-ready software – there will be an explosion of innovation as more people can create software that impacts billions. **Agentic AI will make that happen.**\n\nAI agents understand context, maintain knowledge of entire codebases, and actively collaborate on complex software projects across development, security, and operations. With AI agents, developers can create software at a scale previously unimaginable for individuals or even teams.\n\nBut this shift raises important questions about visibility, control, and how AI will impact developers' work. Organizations need to ensure AI enhances their developers' capabilities while enabling them to maintain oversight of their development process. The key to success isn't just adopting AI – it's adopting it in a way that empowers developers while preserving security, compliance, and governance.\n\n## AI's success depends on your platform, not more add-on tools\n\nWhen you're working with more developers, code, and potential security risks, adding separate tools for each new challenge only creates more complexity. 
Our most recent [DevSecOps Survey](https://about.gitlab.com/the-source/platform/devops-teams-want-to-shake-off-diy-toolchains-a-platform-is-the-answer/) shows just how serious this problem is: DevSecOps teams are juggling up to 14 different tools, with professionals spending up to 80% of their time on non-coding tasks. For AI to be truly effective, it also needs high-quality, unified data. That's hard to achieve with disparate tools.\n\n**The GitLab DevSecOps platform combined with GitLab AI agents** brings everything together in a single data model that encapsulates source code, merge requests, epics, users, access rights, and more. The agents we're building use context about users and projects to standardize how teams work and automate the non-coding tasks that absorb developer time, such as scanning for security issues and enforcing compliance rules. When AI is built directly into the platform, these capabilities become even more powerful, turning AI agents into development partners while keeping you in control of how AI enhances the process.\n\n**This isn't a far-off future — it's what we're building right now with GitLab Duo Workflow.**\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1059060959?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"GitLab Duo Workflow, the future of secure agentic AI software development\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>`\n\n## GitLab Duo Workflow: AI agents on the most comprehensive DevSecOps platform\n\nLeveraging GitLab's end-to-end DevSecOps platform, GitLab Duo Workflow helps developers work at their highest potential. 
While AI coding assistants help with individual pieces of code, GitLab Duo Workflow will understand your entire development lifecycle – automating routine tasks so developers can focus on strategic innovation and creative problem-solving. As we develop GitLab Duo Workflow, here’s what it will be able to help teams achieve:\n\n### From slow project setup to a running start\n\nDevelopers spend precious time configuring new projects, managing dependencies, and setting up basic infrastructure instead of building new features. With GitLab Duo Workflow, you can **automate project bootstrapping directly in the IDE**, providing the right configurations from the start so you can focus on innovation sooner.\n\n### From legacy code to modern applications\n\nModernizing legacy code is more than just updating syntax — it requires understanding dependencies, tests, CI/CD pipelines, and documentation. GitLab Duo Workflow helps **modernize your codebase by handling code refactoring** – from code to tests.\n\n### From context switching to flow state\n\nToday, developers constantly switch between tools, docs, and codebases to solve problems. GitLab Duo Workflow will help **resolve tasks with the full context of your codebase-related issues and merge requests**, letting developers stay in their flow.\n\n### From stale docs to dynamic knowledge\n\nDocumentation becomes stale quickly, making codebases harder to understand and maintain. GitLab Duo Workflow **supports developers in generating and updating documentation**, including README files, code flow diagrams, and architecture documentation.\n\n### From patchy to comprehensive testing\n\nAs codebases grow, maintaining comprehensive test coverage becomes increasingly challenging. 
GitLab Duo Workflow **can generate tests for entire sections of your codebase** while integrating with your existing test infrastructure, ensuring more reliable software with less effort.\n\n## Sign up for the private beta waitlist\n\n[Sign up for the GitLab Duo Workflow private beta waitlist](https://about.gitlab.com/gitlab-duo/agent-platform/) to see the next step in our vision for secure agentic AI – from project setup to deployment. Built on GitLab's DevSecOps platform, these agents understand your entire software lifecycle while maintaining the enterprise-grade security and control organizations require.\n\n*Disclaimer: This page contains information about upcoming products, features, and functionality. This information is for informational purposes only and should not be relied upon for purchasing or planning. All items are subject to change or delay, and the development, release, and timing remain at GitLab Inc.'s sole discretion.*",[9,803,695,717,1133,888],{"slug":2202,"featured":91,"template":698},"gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai","content:en-us:blog:gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai.yml","Gitlab Duo Workflow Enterprise Visibility And Control For Agentic Ai","en-us/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai.yml","en-us/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai",{"_path":2208,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2209,"content":2214,"config":2219,"_id":2221,"_type":14,"title":2222,"_source":16,"_file":2223,"_stem":2224,"_extension":19},"/en-us/blog/gitlab-enterprise-agile-planning-add-on-for-all-roles",{"title":2210,"description":2211,"ogTitle":2210,"ogDescription":2211,"noIndex":6,"ogImage":1715,"ogUrl":2212,"ogSiteName":685,"ogType":686,"canonicalUrls":2212,"schema":2213},"Beyond Devs: GitLab Enterprise Agile Planning add-on for all roles","Our new add-on helps you bring non-technical users into the same 
DevSecOps platform where engineers build, test, secure, and deploy code.","https://about.gitlab.com/blog/gitlab-enterprise-agile-planning-add-on-for-all-roles","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Beyond Devs: GitLab Enterprise Agile Planning add-on for all roles\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Justin Farris\"},{\"@type\":\"Person\",\"name\":\"Melissa Ushakov\"}],\n        \"datePublished\": \"2023-11-16\",\n      }",{"title":2210,"description":2211,"authors":2215,"heroImage":1715,"date":1499,"body":2217,"category":1053,"tags":2218},[2216,1720],"Justin Farris","Bringing teams together supercharges collaboration. Now you can take this a step further with a new GitLab Enterprise Agile Planning add-on that helps you bring non-technical users into the same DevSecOps platform where engineers build, test, secure, and deploy code.\n\nThe add-on enables you to support cross-team collaboration between developers and non-developers without having to purchase full GitLab licenses for non-engineering team members.\n\nWith additional Agile Planning seats ([$15 per user/month](https://about.gitlab.com/pricing/)), non-engineering team members can participate in planning workflows, measure software delivery velocity and impact with Value Stream Analytics, and utilize executive dashboards to drive organizational visibility. \n\n> This new Enterprise Agile Planning add-on is available to GitLab Ultimate subscriptions. Please contact your [GitLab sales representative](https://about.gitlab.com/sales/) for more information.\n\nAgile planning is most powerful when it extends its reach beyond developers to include stakeholders like product managers, designers, marketing, and sales teams. Here are three ways GitLab can bring value to your entire software development process:\n\n## 1. 
Scale up as your team grows\n\nGitLab's Enterprise Agile Planning capabilities are designed to grow with your organization. As your teams expand, so do your needs for Agile planning capabilities and cross-team collaboration. As a unified platform with a single data store, GitLab provides different stakeholders within the organization with a curated view of project status, enabling teams to track progress, identify bottlenecks, and make informed decisions. GitLab supports multi-year and cross-team planning workflows, enabling you to do strategic and tactical planning within a single tool. These efficient cross-team workflows help projects move faster from concept to delivery, ensuring a competitive edge in today’s fast-paced digital landscape.\n\n![EpicTree](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749677087/Blog/Content%20Images/EpicTree.png)\n\n> Migrating from Jira? [Learn more here](https://about.gitlab.com/blog/tips-for-a-successful-jira-to-gitlab-migration/).\n\n## 2. Bring compliance closer to development \n\nGitLab ensures that compliance checks and enforcements are automated and seamlessly integrated into software development, making the process more efficient and transparent, and less error-prone. In GitLab, compliance checks like [approval rules](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/rules.html), [compliance pipelines](https://docs.gitlab.com/ee/user/group/compliance_frameworks.html#compliance-pipelines), and [deployment approvals](https://docs.gitlab.com/ee/ci/environments/deployment_approvals.html) are closer to the code as part of developers’ natural workflow, rather than a separate workflow or within a separate issue-tracking tool. With GitLab, reviewers can more easily review the updates in the context of the change and developers can receive more rapid feedback. 
\n\n![Approvals rules](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749677088/Blog/Content%20Images/Approvals_rules.png)\n\n> Learn more about [GitLab Enterprise Agile Planning](https://about.gitlab.com/solutions/agile-delivery/).\n\n## 3. Eliminate time-consuming configurations\n\nMany Agile planning tools are highly customizable. That can be great for building highly complex and sophisticated workflows, but that complexity can also be a maintenance hazard. Spending time on tool maintenance limits innovation and creates friction in your team’s day-to-day work. GitLab prioritizes convention over configuration to deliver exceptional user experiences that work as-is by default while also giving flexible configuration options so that teams can spend less time on maintenance and more time on innovation. A simplified planning tool within the DevSecOps platform where code is built and delivered drives collaboration, transparency, and trust across all roles, leading to a better cross-team experience to build software faster. \n\n> Are you ready for the Atlassian Server end of life? Learn more about [the benefits of migrating from Atlassian to GitLab](https://about.gitlab.com/move-to-gitlab-from-atlassian/). \n\n## Get started with GitLab Enterprise Agile Planning\n\nIn true GitLab fashion, we continue to [rapidly iterate on our Enterprise Agile Planning capabilities](https://about.gitlab.com/direction/plan/). 
When you’re ready to get started, [we’re here to help](https://about.gitlab.com/sales/).\n",[908,951,9],{"slug":2220,"featured":91,"template":698},"gitlab-enterprise-agile-planning-add-on-for-all-roles","content:en-us:blog:gitlab-enterprise-agile-planning-add-on-for-all-roles.yml","Gitlab Enterprise Agile Planning Add On For All Roles","en-us/blog/gitlab-enterprise-agile-planning-add-on-for-all-roles.yml","en-us/blog/gitlab-enterprise-agile-planning-add-on-for-all-roles",{"_path":2226,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2227,"content":2233,"config":2240,"_id":2242,"_type":14,"title":2243,"_source":16,"_file":2244,"_stem":2245,"_extension":19},"/en-us/blog/gitlab-leader-forrester-wave-integrated-software-delivery-platforms",{"title":2228,"description":2229,"ogTitle":2228,"ogDescription":2229,"noIndex":6,"ogImage":2230,"ogUrl":2231,"ogSiteName":685,"ogType":686,"canonicalUrls":2231,"schema":2232},"GitLab named Leader in The Forrester Wave Integrated Software Delivery Platforms 2023","The Forrester report recognized GitLab for its roadmap, which includes supply chain security, enhanced UI, granular security and compliance controls, and pipeline security.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749682752/Blog/Hero%20Images/Forrestercoverimage.png","https://about.gitlab.com/blog/gitlab-leader-forrester-wave-integrated-software-delivery-platforms","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab named Leader in The Forrester Wave Integrated Software Delivery Platforms 2023\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"GitLab\"}],\n        \"datePublished\": \"2023-06-06\",\n      }",{"title":2228,"description":2229,"authors":2234,"heroImage":2230,"date":2235,"body":2236,"category":2237,"tags":2238},[822],"2023-06-06","\n\nDemand for a platform approach to software delivery is increasing as organizations realize the inefficiencies 
and costs of stitched-together solutions for software delivery, siloed visibility, broken feedback loops, and increased risk of cyberattacks. We recognized the value of the platform approach early on — and we believe that GitLab's single-application DevSecOps Platform is the best way for organizations to improve developer productivity, build high-performing teams, secure the software supply chain, and implement cloud transformations. \n\n## GitLab’s DevSecOps Platform recognized\n![Your image alt text](https://about.gitlab.com/images/blogimages/forresterwave2.png){: .shadow.small.left.wrap-text} In its evaluation, and in the first year of this report, Forrester has named GitLab as the only **Leader** in **The Forrester WaveTM: Integrated Software Delivery Platforms, Q2 2023**. The report evaluated 13 integrated software delivery platform (ISDP) vendors across 26 criteria based on current offering, strategy, and market presence. GitLab scored the highest possible in the criteria of platform-incorporated security tools test automation, roadmap, community, and pricing flexibility and transparency.\n\nWe are excited to see the market mature and recognize the value of an integrated software delivery platform — a strategy GitLab has followed from the start. Our DevSecOps platform is offered as a single application with a unified data store, increasing efficiency and collaboration and providing value unmatched by traditional vendors and complex toolchains. It provides essential automation needed by various teams in the software delivery lifecycle, along with security and governance needed by security professionals. 
We also integrate artificial intelligence (AI) throughout the SDLC by incorporating it into our comprehensive enterprise DevSecOps platform.\n\n> Download [The Forrester Wave: Integrated Software Delivery Platforms, Q2 2023 report](https://page.gitlab.com/forrester-wave-integrated-software-delivery-platforms-2023.html).\n\nRecognizing our leadership and continued innovation, the report emphasizes that GitLab “has led the industry towards consolidated ISDPs. GitLab's strategy includes an on-par vision to deliver an excellent developer experience without sacrificing security or compliance.... GitLab is great for enterprises wishing to consolidate their best-of-breed toolchain into one, high-performing ISDP.”\n\n> “GitLab is far ahead of its competitors and provides one product which offers an easy-to-set-up, easy-to-start product with all these capabilities integrated,” says **Daniel Widerin, Head of Software Delivery, Hilti**\n\n## Roadmap gets high scores\n\nThe Forrester report recognized GitLab for its roadmap and focus on community. “[GitLab’s] roadmap gets leading scores and includes enhanced supply chain security, enhanced UI, granular security and compliance controls, and pipeline security – all things enterprises need.”\n\nThe research firm added: “[GitLab’s] innovation is also good, going beyond traditional developers to include AI/ML engineering. GitLab is an open core product that not only invests heavily in the open source software (OSS) community but also enables its customers to contribute to the product, earning it high scores for community.”\n\nGitLab is trusted by more than 30 million users and more than 50% of Fortune 100 organizations. 
We will continue to focus on integrating transformative technologies into our DevSecOps Platform, such as AI, into all parts of the software delivery lifecycle, software supply chain security, and value stream analytics, to enable customers to accelerate and secure software development and delivery.\n\n> Download [The Forrester Wave: Integrated Software Delivery Platforms, Q2 2023 report](https://page.gitlab.com/forrester-wave-integrated-software-delivery-platforms-2023.html).\n\n","insights",[1133,2239,9],"research",{"slug":2241,"featured":6,"template":698},"gitlab-leader-forrester-wave-integrated-software-delivery-platforms","content:en-us:blog:gitlab-leader-forrester-wave-integrated-software-delivery-platforms.yml","Gitlab Leader Forrester Wave Integrated Software Delivery Platforms","en-us/blog/gitlab-leader-forrester-wave-integrated-software-delivery-platforms.yml","en-us/blog/gitlab-leader-forrester-wave-integrated-software-delivery-platforms",{"_path":2247,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2248,"content":2254,"config":2261,"_id":2263,"_type":14,"title":2264,"_source":16,"_file":2265,"_stem":2266,"_extension":19},"/en-us/blog/gitlab-leader-gartner-magic-quadrant-devops-platforms",{"title":2249,"description":2250,"ogTitle":2249,"ogDescription":2250,"noIndex":6,"ogImage":2251,"ogUrl":2252,"ogSiteName":685,"ogType":686,"canonicalUrls":2252,"schema":2253},"GitLab named Leader in 2023 Gartner DevOps Platform Quadrant","In the first Gartner® Magic Quadrant™ for this category, GitLab is positioned highest on the Ability to Execute axis.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663830/Blog/Hero%20Images/gartner-report-blog-asset.jpg","https://about.gitlab.com/blog/gitlab-leader-gartner-magic-quadrant-devops-platforms","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab named a Leader in the 2023 Gartner Magic Quadrant for DevOps 
Platforms\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Ashley Kramer\"}],\n        \"datePublished\": \"2023-06-07\",\n      }",{"title":2255,"description":2250,"authors":2256,"heroImage":2251,"date":2258,"body":2259,"category":2237,"tags":2260},"GitLab named a Leader in the 2023 Gartner Magic Quadrant for DevOps Platforms",[2257],"Ashley Kramer","2023-06-07","\nToday marks an important milestone for DevOps and for GitLab. \n\nGartner® recognized GitLab as a Leader in the 2023 Gartner® Magic Quadrant™ for DevOps Platforms – the first Magic Quadrant for the category – positioned highest on the Ability to Execute axis. According to Gartner, Leaders execute well against their current vision and are well-positioned for tomorrow. \n\nSince our founding, we have been focused on delivering the most comprehensive suite of solutions for every use case – and for every stakeholder - in developing and deploying software. These solutions come together as a comprehensive platform that eliminates point solution tool sprawl and a ‘Do it Yourself’ DevOps approach. GitLab brings together everyone involved in the software development lifecycles – development teams, security teams, operations teams – to collaborate together on the same platform. \n\nWe believe Gartner naming GitLab a Leader in the Magic Quadrant for DevOps Platforms is a recognition of our success in both creating a comprehensive software development and delivery platform, and our role in helping mature the DevOps Platform category so that it is ready for mainstream technology adoption. \n\n![2023 Gartner® Magic Quadrant™ for DevOps Platforms](https://about.gitlab.com/images/blogimages/gartnermqfigure1.png){: .shadow}\n\nGitLab’s goal is to help our customers deliver software faster. We do this by improving developer productivity, increasing operational efficiency, securing the software supply chain, and accelerating their digital transformation. 
Today, GitLab is the most comprehensive AI-powered DevSecOps platform. \n\n> Download the [2023 Gartner Magic Quadrant for DevOps Platforms](http://about.gitlab.com/gartner-magic-quadrant).\n\n### Reducing complexity and increasing operational efficiency \nWe focus on reducing production risks through automation. With a best-in-class CI/CD solution, GitLab empowers teams to build and test every change as well as create scalable and repeatable software delivery processes. Our platform eliminates the complexity of sprawling toolchains, preventing context switching, reducing cognitive load, improving developer satisfaction, and driving operational efficiencies across organizations. \n\n### Shifting left with embedded security \nGitLab helps organizations meet [the need for speed and security](/the-source/ai/velocity-with-guardrails-ai-automation/) throughout the software supply chain because security is embedded within the software development lifecycle rather than bolted on as an afterthought. GitLab enables teams to automate policy enforcement, compliance frameworks, and security testing, which frees up resources. We continue to innovate in security. In the last quarter alone, we’ve introduced capabilities that support centralized policy management; expand our compliance reports, controls, and dashboards; and support default [SLSA Level 3 attestations](/direction/supply-chain/#frameworks).\n\n### Driving action with insights and metrics \nGitLab helps customers understand and analyze every aspect of the software delivery process. We are innovating on [value stream management](/solutions/value-stream-management/) through a unified data store, [tracking of DORA metrics](https://docs.gitlab.com/ee/user/analytics/dora_metrics.html), value stream dashboards, and value stream analytics – all designed to give stakeholders a unique and useful view into the end-to-end software delivery value stream. 
Organizations can now visualize and manage DevSecOps workflows – from ideation to delivery – to gain insight into how digital transformation and technology investments are delivering value and driving business results.\n\n### Embedding AI throughout the software development lifecycle\nGitLab is an [AI-powered DevSecOps platform](/solutions/ai/). We adopt a privacy-first approach, ensuring that organizations can be confident their intellectual property is safe within our infrastructure. We integrate AI throughout the software development lifecycle to improve cycle time, from code creation and testing to security and deployment.\n\n### Empowering innovation with open core \nGitLab is built on an open core model, enabling us to be on the leading edge of innovation. Every year, our customers and the community at-large contribute hundreds of new capabilities to our DevSecOps platform. Through our feedback issues and publicly available roadmaps, we continue to stay close to our community and invite everyone to help improve our platform. \n\nOn behalf of the GitLab team, we are honored to be named a Leader by Gartner in the 2023 Gartner Magic Quadrant for DevOps Platforms. We will continue to innovate every day to make DevSecOps even more effective for our customers and to achieve our mission to make it so [everyone can contribute](/company/mission/). \n\n> Download the [2023 Gartner Magic Quadrant for DevOps Platforms](http://about.gitlab.com/gartner-magic-quadrant).\n\n*Gartner, Magic Quadrant for DevOps Platforms, Manjunath Bhat, Thomas Murphy, Joachim Herschmann, Daniel Betts, Chris Saunderson, Hassan Ennaciri, Bill Holz, Peter Hyde, 05 June 2023* \n\n*GARTNER is a registered trademark and service mark of Gartner, Inc. and/or its affiliates in the U.S. and internationally, and MAGIC QUADRANT is a registered trademark of Gartner, Inc. and/or its affiliates and are used herein with permission. 
All rights reserved.*\n\n*Gartner does not endorse any vendor, product or service depicted in its research publications, and does not advise technology users to select only those vendors with the highest ratings or other designation. Gartner research publications consist of the opinions of Gartner’s research organization and should not be construed as statements of fact. Gartner disclaims all warranties, expressed or implied, with respect to this research, including any warranties of merchantability or fitness for a particular purpose.*\n\n*This graphic was published by Gartner Inc. as part of a larger report and should be evaluated in the context of the entire document. The Gartner document is available upon request from Gartner B.V.*\n",[1133,2239,9],{"slug":2262,"featured":6,"template":698},"gitlab-leader-gartner-magic-quadrant-devops-platforms","content:en-us:blog:gitlab-leader-gartner-magic-quadrant-devops-platforms.yml","Gitlab Leader Gartner Magic Quadrant Devops Platforms","en-us/blog/gitlab-leader-gartner-magic-quadrant-devops-platforms.yml","en-us/blog/gitlab-leader-gartner-magic-quadrant-devops-platforms",{"_path":2268,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2269,"content":2275,"config":2282,"_id":2284,"_type":14,"title":2285,"_source":16,"_file":2286,"_stem":2287,"_extension":19},"/en-us/blog/gitlab-named-a-leader-in-the-2024-gartner-magic-quadrant-for-devops",{"title":2270,"description":2271,"ogTitle":2270,"ogDescription":2271,"noIndex":6,"ogImage":2272,"ogUrl":2273,"ogSiteName":685,"ogType":686,"canonicalUrls":2273,"schema":2274},"GitLab named 2024 Gartner DevOps Platforms Quadrant leader","GitLab is positioned highest in Ability to Execute and Completeness of Vision, which we believe is recognition of our customers’ success and our continued innovation in the DevOps 
category.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662523/Blog/Hero%20Images/Gartner_DevOps_Blog_Post_Cover_Image_1800x945__2_.png","https://about.gitlab.com/blog/gitlab-named-a-leader-in-the-2024-gartner-magic-quadrant-for-devops","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab named a Leader in the 2024 Gartner Magic Quadrant for DevOps Platforms\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Ashley Kramer\"}],\n        \"datePublished\": \"2024-09-05\",\n      }",{"title":2276,"description":2271,"authors":2277,"heroImage":2272,"date":2278,"body":2279,"category":1133,"tags":2280},"GitLab named a Leader in the 2024 Gartner Magic Quadrant for DevOps Platforms",[2257],"2024-09-05","DevOps was originally just a concept, a methodology for delivering software faster by bringing traditionally disparate teams together. It was a response to all the issues caused by the separation of those who built software and those who deployed it.\n\nAt GitLab, we iterated on that concept: Instead of stitching together tools to create a complex DevOps toolchain, a [single DevOps platform](https://about.gitlab.com/platform/) would result in tighter collaboration, greater automation, and more scalable and standardized processes.\n\nWe believe that strategy, which focuses on our customers' success, was correct. 
In the second iteration of the [Gartner Magic Quadrant for DevOps Platforms](https://about.gitlab.com/gartner-magic-quadrant/), we are once again named a Leader by Gartner and this time, positioned highest on both axes: Ability to Execute and Completeness of Vision.\n\n![Gartner MQ for DevOps Platforms 2024 image](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674334/Blog/Content%20Images/figure1.png)\n\n> Download the [2024 Gartner® Magic Quadrant™ for DevOps Platforms report](https://about.gitlab.com/de-de/gartner-magic-quadrant/).\n\nToday’s software organizations must contend with increasing security threats, complex compliance requirements, and carefully adopting new technologies such as generative AI. This is in addition to simply delivering on their promises of scalable services and continued innovation to their own customers.\n\nGitLab helps our customers face these challenges and become leaders in their own industries. With our AI-powered DevSecOps platform, they are shifting security left, enabling visibility throughout the development lifecycle, and bringing together all the roles and responsibilities needed to deliver the software that powers our world.\n\n## Furthering the DevOps vision\n\nOur work here isn’t done. We will continue to innovate on the DevOps vision and advance our DevSecOps platform in two ways.\n\nFirst, we want to invite even more teams to collaborate on the same platform, with specific features for those involved in [Agile planning](https://about.gitlab.com/blog/categories/agile-planning/), [data science](https://about.gitlab.com/topics/devops/the-role-of-ai-in-devops/), and [observability and application monitoring](https://docs.gitlab.com/operations/observability/).\n\nSecond, we strive to make our platform adoption and deployment options even more flexible to meet our customers’ diverse needs. 
This includes investing in [GitLab Dedicated](https://about.gitlab.com/dedicated/), our single-tenant, hosted option, so companies in highly regulated industries can have the simplicity of SaaS and the power of all the latest features and capabilities, while adhering to the compliance needs of isolated infrastructure.\n\n## Helping organizations build secure software\n\nBeyond building a better collaboration platform for delivering software, one of the most important things we do at GitLab is help organizations build more secure and compliant software. Our vision here sets us apart, as GitLab integrates [security scanning](https://about.gitlab.com/solutions/security-compliance/) at the point of code commit, not when applications are ready for release. This helps teams catch vulnerabilities sooner, leading to faster release cycles. GitLab also makes compliance easy with policy guardrails and automatically generating [a software bill of materials](https://about.gitlab.com/blog/the-ultimate-guide-to-sboms/).\n\nWe know our customers face more security threats as their own software surface attack area increases. This is why, in the next 12 months, we plan to continue improving our SAST scanners, add additional policy controls, and build [an upcoming native secrets manager](https://about.gitlab.com/blog/gitlab-native-secrets-manager-to-give-software-supply-chain-security-a-boost/).\n\n## Leading with AI throughout the SDLC\n\nOur vision is to also be a leader in AI – both in enabling our customers to build innovative software with AI, and also to do it with privacy-first AI technology. AI represents a generational leap forward with an incredible amount of opportunity when integrated throughout the software development lifecycle. As we innovate, we are doing so responsibly. 
We’ve heard our customers’ concerns loud and clear: They want [AI with guardrails](https://about.gitlab.com/the-source/ai/velocity-with-guardrails-ai-automation/), [AI that’s transparent](https://about.gitlab.com/ai-transparency-center/), and AI that respects their code and intellectual property.\n\nWe are committed to building [GitLab Duo](https://about.gitlab.com/gitlab-duo/), a suite of AI-powered features for our DevSecOps platform that are all of these: comprehensive, privacy-first, and built to support the entire software development lifecycle.\n\nWe believe this commitment and our GitLab Duo features are why, recently, [Gartner® also named us a Leader in its first Magic Quadrant™ for AI Code Assistants](https://about.gitlab.com/blog/gitlab-named-a-leader-in-2024-gartner-magic-quadrant-for-ai-code-assistants/).\n\nWe are honored by this recognition and see it as a sign to continue listening to you  –  our customers – because that is what drives our vision, product roadmap, and commitment in delivering the best DevSecOps platform.\n\n> Download the [2024 Gartner® Magic Quadrant™ for DevOps Platforms report](https://about.gitlab.com/gartner-magic-quadrant/).\n\n***Source: Gartner, Magic Quadrant for DevOps Platforms, Keith Mann, Thomas Murphy, Bill Holz, George Spafford, August 2024***\n\n***GARTNER is a registered trademark and service mark of Gartner, Inc. and/or its affiliates in the U.S. and internationally, and MAGIC QUADRANT is a\nregistered trademark of Gartner, Inc. and/or its affiliates and are used herein with permission. All rights reserved.***\n\n***Gartner does not endorse any vendor, product or service depicted in its research publications, and does not advise technology users to select only those vendors with the highest ratings or other designation. Gartner research publications consist of the opinions of Gartner’s research organization and should not be construed as statements of fact. 
Gartner disclaims all warranties, expressed or implied, with respect to this research, including any warranties of merchantability or fitness for a particular purpose.***\n\n***This graphic was published by Gartner Inc. as part of a larger report and should be evaluated in the context of the entire document. The Gartner document is available upon request from Gartner.***",[1133,2239,9,2281,951],"DevOps",{"slug":2283,"featured":91,"template":698},"gitlab-named-a-leader-in-the-2024-gartner-magic-quadrant-for-devops","content:en-us:blog:gitlab-named-a-leader-in-the-2024-gartner-magic-quadrant-for-devops.yml","Gitlab Named A Leader In The 2024 Gartner Magic Quadrant For Devops","en-us/blog/gitlab-named-a-leader-in-the-2024-gartner-magic-quadrant-for-devops.yml","en-us/blog/gitlab-named-a-leader-in-the-2024-gartner-magic-quadrant-for-devops",{"_path":2289,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2290,"content":2296,"config":2300,"_id":2302,"_type":14,"title":2303,"_source":16,"_file":2304,"_stem":2305,"_extension":19},"/en-us/blog/gitlab-named-a-leader-in-the-forrester-wave-devops-platforms-q2-2025",{"title":2291,"description":2292,"ogTitle":2291,"ogDescription":2292,"noIndex":6,"ogImage":2293,"ogUrl":2294,"ogSiteName":685,"ogType":686,"canonicalUrls":2294,"schema":2295},"GitLab named a Leader in The Forrester Wave™: DevOps Platforms, Q2 2025","Forrester calls GitLab platform the \"most all-in-one of the all-in-one solutions,\" adding it \"suits enterprises looking to standardize with a single purchase.\"","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749658898/Blog/Hero%20Images/blog-post-image-forrester-wave-1800x945px-fy26.png","https://about.gitlab.com/blog/gitlab-named-a-leader-in-the-forrester-wave-devops-platforms-q2-2025","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab named a Leader in The Forrester Wave™: DevOps Platforms, Q2 2025\",\n        
\"author\": [{\"@type\":\"Person\",\"name\":\"Dave Steer\"}],\n        \"datePublished\": \"2025-06-02\",\n      }",{"title":2291,"description":2292,"authors":2297,"heroImage":2293,"date":799,"body":2298,"category":717,"tags":2299},[2122],"Choosing a DevSecOps platform is one of the biggest technology decisions enterprises make. That's why we are thrilled to be named a [**Leader in The Forrester Wave™: DevOps Platforms, Q2 2025**](https://about.gitlab.com/forrester-wave-devops-platform/), receiving the highest scores possible across the criteria our customers tell us they care about most, including day zero experience, developer tooling, build automation and CI, deployment automation, AI risk mitigation, AI infusion, directly incorporated security tools, and platform cohesion.\n\n***\"GitLab is the most all-in-one of the all-in-one solutions and suits enterprises looking to standardize with a single purchase.” -*** Forrester Wave™: DevOps Platforms, Q2 2025\n\nFor us, this recognition reflects what we've been hearing from customers: They need to deliver secure software faster, but existing solutions force them to compromise on speed, security, or simplicity. GitLab delivers all three. 
And with our [GitLab 18.0 release](https://about.gitlab.com/releases/2025/05/15/gitlab-18-0-released/) in May, we’ve taken this a step further by [including AI-native GitLab Duo capabilities](https://about.gitlab.com/blog/gitlab-premium-with-duo/) — such as test generation, code suggestions, and code refactoring — directly in GitLab Premium and GitLab Ultimate at no additional cost.\n\n> [Access the report today!](https://about.gitlab.com/forrester-wave-devops-platform/)\n\n![ Forrester Wave™: DevOps Platforms, Q2 2025 graphic ](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749673518/Blog/Content%20Images/Image_DevOps-Platforms-Q2-2025.png)\n\n## Staying at the forefront of AI transformation, with enterprise control\n\nDevSecOps is rapidly evolving, with AI at the forefront of that change. Unfortunately, many AI tools force a choice: cutting-edge capabilities or enterprise security. \n\nForrester scored GitLab a 5 – the highest on their scale – for both the **AI infusion** and **AI risk mitigation** criteria. We’re pleased to see our focus on building innovative AI capabilities that maintain security is being noticed by more than just our customers.\n\nThis dual strength shows up across our GitLab Duo AI offerings, including:\n\n* Duo Workflow (private beta): Autonomous AI agents that handle complex tasks across development, security, and operations — with enterprise-grade guardrails and audit trails.  \n* Agentic Chat: Contextual, conversational AI assistance for everything from code explanations to test creation — with IP protection and privacy controls built in.  \n* Code Suggestions: AI assistance that can predictively complete code blocks, define function logic, generate tests, and propose common code like regex patterns.  
\n* AI-native Vulnerability Resolution: Find and fix vulnerabilities with auto explanation and auto-generated merge requests, ensuring a streamlined development process.\n\n## Doing more with less \n\nWe’ve heard loud and clear that DevSecOps teams don’t need more tools and integrations that help them with part of their software delivery lifecycle. They need a seamless, integrated developer experience that covers the entire SDLC.\n\nWe believe GitLab’s scores in the following criteria are validation of our customer-focused strategy:\n\n* **Day zero experience:** Forrester cited our “strong day zero experience,” noting that “everything is ready to run out-of-the-box,” supported by extensive migration tools and tutorials. \n* **Developer tooling:** Forrester pointed to [GitLab Duo with Amazon Q](https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws/), our agentic AI offering for AWS customers, as well as our cloud development environment, integrated developer platform, and wikis for documentation as examples.  \n* **Project planning and alignment:** Forrester noted our \"strong compliance center,\" and that we have tools to drive alignment top-down and bottom-up.  \n* **Pipeline security:** Forrester gave us the highest score possible in the pipeline security criterion.  \n* **Build automation and CI:** Forrester cited our build automation and CI with multistage build pipelines and strong self-hosted support.\n\n## Read the report\n\nFor us, being named a Leader in The Forrester Wave™: DevOps Platforms, Q2 2025 speaks to the breadth and depth of our platform’s capabilities, providing a single source of truth for the entire software development lifecycle. No more juggling multiple tools and integrations – GitLab provides a seamless, integrated experience that boosts productivity and reduces friction. 
We believe this placement reflects the hard work of our team, the many contributions from GitLab’s open source community, the invaluable feedback from our customers, and our dedication to shaping the future of software development.\n\n> #### [Access the report today!](https://about.gitlab.com/forrester-wave-devops-platform/)\n\n*Forrester does not endorse any company, product, brand, or service included in its research publications and does not advise any person to select the products or services of any company or brand based on the ratings included in such publications. Information is based on the best available resources. Opinions reflect judgment at the time and are subject to change. For more information, read about Forrester’s objectivity [here](https://www.forrester.com/about-us/objectivity/).*",[2239,717,1133,9],{"slug":2301,"featured":91,"template":698},"gitlab-named-a-leader-in-the-forrester-wave-devops-platforms-q2-2025","content:en-us:blog:gitlab-named-a-leader-in-the-forrester-wave-devops-platforms-q2-2025.yml","Gitlab Named A Leader In The Forrester Wave Devops Platforms Q2 2025","en-us/blog/gitlab-named-a-leader-in-the-forrester-wave-devops-platforms-q2-2025.yml","en-us/blog/gitlab-named-a-leader-in-the-forrester-wave-devops-platforms-q2-2025",{"_path":2307,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2308,"content":2314,"config":2320,"_id":2322,"_type":14,"title":2323,"_source":16,"_file":2324,"_stem":2325,"_extension":19},"/en-us/blog/gitlab-native-secrets-manager-to-give-software-supply-chain-security-a-boost",{"title":2309,"description":2310,"ogTitle":2309,"ogDescription":2310,"noIndex":6,"ogImage":2311,"ogUrl":2312,"ogSiteName":685,"ogType":686,"canonicalUrls":2312,"schema":2313},"GitLab native secrets manager boosts supply chain security","GitLab is building a secrets manager that is key to providing an end-to-end, cloud-agnostic approach to the management of sensitive 
information.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749664530/Blog/Hero%20Images/AdobeStock_282096522.jpg","https://about.gitlab.com/blog/gitlab-native-secrets-manager-to-give-software-supply-chain-security-a-boost","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab native secrets manager to give software supply chain security a boost\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Jocelyn Eillis\"}],\n        \"datePublished\": \"2024-05-20\",\n      }",{"title":2315,"description":2310,"authors":2316,"heroImage":2311,"date":1400,"body":2318,"category":693,"tags":2319},"GitLab native secrets manager to give software supply chain security a boost",[2317],"Jocelyn Eillis","In a constantly evolving digital world, keeping the software supply chain and sensitive information secure is a priority for organizations of all sizes. To reduce the complexity associated with managing multiple infrastructure tools, GitLab plans to release a native secrets manager later this year that will enable users to manage and scale secrets across the DevSecOps platform. This cloud-agnostic, built-in solution has a [similar look and feel to the CI Variables experience](https://gitlab.com/groups/gitlab-org/-/epics/11373) in GitLab, which will make it easier to learn and, therefore, lower friction to adopt.\n\n## What are secrets and a secrets manager?\n\n- A **secret** is a piece of data that acts as a credential to authenticate with systems or services. Secrets are highly sensitive and should be protected from unauthorized use or exposure. Examples of secrets include passwords, API keys, and certificates.\n\n- A **secrets manager** is a centralized tool that stores and manages these secrets throughout their lifecycle. 
Secrets are stored using unique encryption keys in order to achieve isolation, in this case, across GitLab.\n\n## Current state of secrets management within GitLab\n\nBecause GitLab does not currently have a native secrets manager, we have recommended using a third-party solution. Users leverage third-party secrets storage providers through our [OIDC connection method](https://docs.gitlab.com/ee/ci/secrets/id_token_authentication.html) (Free tier) or via native integrations (currently available for [HashiCorp Vault](https://docs.gitlab.com/ee/ci/secrets/#use-vault-secrets-in-a-ci-job), [Azure Key Vault](https://docs.gitlab.com/ee/ci/secrets/azure_key_vault.html), and [Google Secret Manager](https://docs.gitlab.com/ee/ci/secrets/gcp_secret_manager.html) (Premium and Ultimate tier). However, we understand a third-party provider can be resource-prohibitive for some users because of the overhead for setup and managing user roles and integrations, as well as additional costs.\n\n## About the GitLab secrets manager\n\nThe GitLab secrets manager will allow customers to store sensitive credentials within the GitLab DevSecOps platform, which will simplify management and reduce risk of leaking sensitive information. Our [initial release of the native secrets manager](https://gitlab.com/groups/gitlab-org/-/epics/10723) will be focused on bringing secrets management to the CI workflow, then workflows across all of GitLab. We have prioritized options to use an [open source secrets manager](https://openbao.org/) with the GitLab UI. This enables us to stay true to our open core roots while minimizing our security attack surface as an extra layer of protection. \n\nGitLab plans to have the native secrets manager available in Beta release by year-end.\n\n### Aligning secrets management with GitLab Security\n\nAs we continue to iterate, the GitLab secrets manager will integrate with existing security capabilities. 
Our goal is to automate when possible, while still empowering the user to own security decisions by providing prompts or calls-to-action. Here are some areas we have identified for alignment: \n\n- **Secret detection.** [Detected secrets](https://gitlab.com/groups/gitlab-org/-/epics/13607) can automatically be placed in the native secrets manager. Instances of the secrets in the pipeline will be replaced automatically with the new secret key. \n\n- **Access tokens.** When access tokens are generated, [they will automatically be placed in the secrets manager](https://gitlab.com/gitlab-org/gitlab/-/issues/460606). This eliminates the need for the user to manually create a secret for each access token. This also eliminates the need to expose the value of the token at creation. A similar use case can be applied to [deploy keys](https://gitlab.com/gitlab-org/gitlab/-/issues/432522). \n\n- **Compliance.** [Advancing audit logging](https://about.gitlab.com/direction/govern/compliance/audit-events/#how-we-will-prioritize-adding-new-audit-events) within GitLab makes it easier for admins and security teams to identify access, changes, and deletion for each secret, all within the existing GitLab [audit events](https://docs.gitlab.com/ee/administration/audit_event_types.html).  \n\n- **Secured artifacts.** Enabling a [verifiable way to link job artifacts back to their source code](https://gitlab.com/groups/gitlab-org/-/epics/6207) is critical to ensuring integrity of the software supply chain. Attestations require signing and authentication to verify authenticity in the process and the secrets manager will secure these credentials within GitLab.\n\n## Share your feedback\n\nAt GitLab, we understand a single tool does not fit all. While we are building a native solution, we are also committed to continuing to support our existing third-party integrations for Hashicorp’s Vault, Azure Key Vault, and Google Secret Manager. 
We envision an ecosystem where multiple secret management solutions are available to customers, ensuring the best-fit solution for our customers’ use cases. \n\nInterested in joining the conversation to shape the future of GitLab’s offerings in the secrets management space? Please [leave us a comment](https://gitlab.com/gitlab-org/gitlab/-/issues/460757). You also can view our current [direction page](https://about.gitlab.com/direction/govern/pipeline_security/secrets_management/) for the latest category updates and follow our progress to building our own secrets manager in our [MVC epic](https://gitlab.com/groups/gitlab-org/-/epics/10723). \n\n_Disclaimer: This blog contains information related to upcoming products, features, and functionality. It is important to note that the information in this blog post is for informational purposes only. Please do not rely on this information for purchasing or planning purposes. As with all projects, the items mentioned in this blog and linked pages are subject to change or delay. 
The development, release, and timing of any products, features, or functionality remain at the sole discretion of GitLab._\n",[693,695,9],{"slug":2321,"featured":6,"template":698},"gitlab-native-secrets-manager-to-give-software-supply-chain-security-a-boost","content:en-us:blog:gitlab-native-secrets-manager-to-give-software-supply-chain-security-a-boost.yml","Gitlab Native Secrets Manager To Give Software Supply Chain Security A Boost","en-us/blog/gitlab-native-secrets-manager-to-give-software-supply-chain-security-a-boost.yml","en-us/blog/gitlab-native-secrets-manager-to-give-software-supply-chain-security-a-boost",{"_path":2327,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2328,"content":2334,"config":2339,"_id":2341,"_type":14,"title":2342,"_source":16,"_file":2343,"_stem":2344,"_extension":19},"/en-us/blog/gitlab-premium-with-duo",{"title":2329,"description":2330,"ogTitle":2329,"ogDescription":2330,"noIndex":6,"ogImage":2331,"ogUrl":2332,"ogSiteName":685,"ogType":686,"canonicalUrls":2332,"schema":2333},"Unlocking AI for every GitLab Premium and Ultimate customer","GitLab Premium and Ultimate now include GitLab Duo essentials for creating and understanding code throughout the software development lifecycle, all at no additional cost.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749660188/Blog/Hero%20Images/blog-premium-with-duo-cover-0756-fy26-v2-1800x945.png","https://about.gitlab.com/blog/gitlab-premium-with-duo","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Unlocking AI for every GitLab Premium and Ultimate customer\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"David DeSanto, Chief Product Officer, GitLab\"}],\n        \"datePublished\": \"2025-05-15\",\n      }",{"title":2329,"description":2330,"authors":2335,"heroImage":2331,"date":2336,"body":2337,"category":717,"tags":2338},[1869],"2025-05-15","Today, we launch GitLab 18.0, which 
highlights our latest innovations and plans in core DevSecOps workflows, security and compliance, and AI. __As part of this release, we're excited to announce that GitLab Premium and Ultimate now include essential GitLab Duo AI capabilities at no additional cost.__ All Premium and Ultimate customers will have immediate access to GitLab Duo Code Suggestions and Chat directly in their preferred supported source code editors and IDEs.\n\n## AI for every development team\n\nArtificial intelligence is now at the center of the developer experience. AI enhances coding in many ways: It analyzes your codebase and provides real-time suggestions as you type, creates functions and methods based on your project's context, reduces repetitive tasks, and automates code reviews.\n\nOver the past few years, we've built [GitLab Duo](https://about.gitlab.com/gitlab-duo/) to infuse generative and agentic AI capabilities like these into our platform. Because writing code is just the start of the software lifecycle – our [global DevSecOps study](https://about.gitlab.com/developer-survey/) found that developers spend 79% of their time on tasks other than code creation – we have adopted a strategy to integrate AI throughout the entire software development lifecycle. \n\nNow, we’re excited to take the next step forward by including essential GitLab Duo capabilities in our GitLab Premium and Ultimate tiers, enabling developers to get the benefits of AI at no additional cost.\n\nBy including GitLab Duo Chat and Duo Code Suggestions in Premium and Ultimate, every software engineer can accelerate their workflow within the IDE — without requiring separate tooling, licensing, or governance. 
All existing Premium and Ultimate customers now have instant access to Duo Chat and Code Suggestions, once they upgrade to GitLab 18.0, and this enhancement becomes standard for all new customers.\n\n> **\"GitLab has already been instrumental in eliminating our reliance on a fragmented toolchain, which cut costs from disconnected solutions, and streamlined our workflow. Enhancing GitLab Premium with Duo will give us even greater efficiency and cost savings as our developers spend less time on routine coding tasks and more time tackling complex challenges that drive real business value.”**\n>\n>- Andrei Nita, Chief Technology Officer at McKenzie Intelligence Services\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1083723619?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"GitLab Premium with Duo Core\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n\u003Cbr>\u003C/br>\nPremium and Ultimate customers now have these AI-native capabilities:\n\n#### GitLab Duo Code Suggestions\n\n* Generate complete functions and code blocks from comments  \n* Get intelligent code completions as you type  \n* Support for 20+ programming languages  \n* Available in most popular IDEs\n\nTake this interactive tour to learn about GitLab Duo Code Suggestions (click on the image to start the tour).\n\n\u003Ca href=\"https://gitlab.navattic.com/code-suggestions\">\u003Cimg src=\"https://res.cloudinary.com/about-gitlab-com/image/upload/v1752175911/Blog/b5gdnls7jdyrpeyjby5j.png\" alt=\"GitLab Duo Code Suggestions cover image\">\u003C/a>\n\nLearn more in our [Duo Code Suggestions documentation](https://docs.gitlab.com/user/project/repository/code_suggestions/).\n\n#### GitLab Duo Chat\n\n* 
Explain unfamiliar code to understand complex functionality  \n* Refactor existing code to improve quality and maintainability  \n* Generate comprehensive test cases to help catch bugs earlier  \n* Fix code issues directly in your workflow\n\n![Duo Chat - API endpoint explanation](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749673912/Blog/Content%20Images/Duo_Chat_-_gif_-_API_endpoint_explanation__3_.gif)\n\nLearn more in our [Duo Chat documentation](https://docs.gitlab.com/user/gitlab_duo_chat/).\n\n> **\"For us, as GitLab users, Duo's intelligent code suggestions have become a daily asset for our developers. Combined with the chat feature, it allows for immediate feedback and iteration, resulting in faster development cycles and a more secure codebase. It's a seamless and powerful addition to our workflows.\"**\n>\n>- Felix Kortmann, Chief Technology Officer, Ignite by FORVIA HELLA\n\n## Duo Enterprise now available to GitLab Premium customers\n\nDue to strong customer demand, we're also excited to share that [GitLab Premium](https://about.gitlab.com/pricing/premium/) customers now can purchase Duo Enterprise, our full suite of AI offerings, without needing to upgrade to GitLab Ultimate. Premium customers can enjoy a rich AI experience seamlessly integrated across the software development lifecycle. This includes exciting GitLab Duo capabilities like:\n\n* [Root Cause Analysis](https://docs.gitlab.com/user/gitlab_duo/use_cases/#root-cause-analysis-use-cases) helps resolve CI/CD pipeline failures quickly, ensuring your CI/CD pipelines remain green.  \n* [Code Review](https://docs.gitlab.com/user/project/merge_requests/duo_in_merge_requests/#have-gitlab-duo-review-your-code) enables faster merge request reviews by leveraging Duo as a code reviewer.  \n* [Advanced Chat](https://docs.gitlab.com/user/gitlab_duo_chat/) summarizes conversations, helps understand code changes, and provides advanced configuration assistance.  
\n* [Self-Hosted](https://docs.gitlab.com/administration/gitlab_duo_self_hosted/) enables Duo to be leveraged within air-gapped and offline environments by hosting approved AI models for Duo to use.\n\nIn addition to Duo Enterprise availability, we continue to invest in the success of GitLab Premium customers. Since the launch of GitLab 17, [we’ve shipped more than a hundred features and improvements](https://gitlab.com/gitlab-org/gitlab/-/releases), including: \n\n* [**CI/CD Catalog**](https://about.gitlab.com/blog/ci-cd-catalog-goes-ga-no-more-building-pipelines-from-scratch/) enables developers to share, discover, and reuse   \npre-existing CI/CD components and configurations.  \n* [**Artifact registry**](https://docs.gitlab.com/user/packages/virtual_registry/) gives developers secure access to artifacts and seamless integration with CI/CD pipelines.  \n* [**Remote development**](https://docs.gitlab.com/user/project/remote_development/) enables developers to work in on-demand,  \ncloud-based development environments.\n\n> [Learn more about GitLab Premium features.](https://about.gitlab.com/pricing/premium/#wp-premium-features)\n\n## GitLab Duo: AI that meets organizations where they are\n\nGitLab customers have a comprehensive menu of Duo offerings, across our Pro and Enterprise solutions, to meet you where you are in the AI adoption cycle – the further along your teams are, the more capabilities you can use to build, test, and deploy secure software faster.\n\n![Key features in Duo plans](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749673912/Blog/Content%20Images/Screenshot_2025-05-14_at_8.50.34_AM.png)\n\n## How current GitLab Ultimate and Premium customers can get started with Duo\n\nStarting with GitLab 18.0, for existing Ultimate and Premium customers, Duo Code Suggestions and Chat features will be off by default but can easily be enabled – learn how below.\n\nTo start experiencing GitLab Premium and Ultimate with Duo: \n\n1. 
Ensure you're on GitLab Premium or Ultimate. If not, you can start a free trial. \n\n2. Enable GitLab Duo in your organization settings.\n\n3. If using a local IDE, install the appropriate GitLab [Editor Extension](https://docs.gitlab.com/editor_extensions/#available-extensions). \n\n4. Start using Code Suggestions and Chat in your preferred supported local IDE or the GitLab Web IDE.\n\n**Note:** For new customers and trials, GitLab's AI capabilities will be enabled automatically.\n\n## AI-native development requires a DevSecOps platform\n\nAI is fundamentally reshaping the developer experience. Organizations won't just have more people building software. They'll have more production-ready code generated by AI – **making GitLab more essential than ever.** \n\nWe built GitLab Premium and Ultimate with Duo specifically for this new reality, giving teams one secure foundation for all their code. As AI generates code across your organization, GitLab becomes your control center: no separate tools for security scanning, compliance checks, or managing pipelines. Just a single, unified platform that scales with your organization and helps ensure all code meets your standards before reaching production. As AI accelerates your development, GitLab enables you to maintain control, security, and quality from end to end.\n\n> To learn more about GitLab Duo and all the ways it can transform how your team works, [visit our GitLab Premium page](https://about.gitlab.com/pricing/premium/) or if you are a GitLab customer, reach out to your GitLab representative to schedule a demo. 
Finally, we invite you to join us on June 24, 2025, for our [GitLab 18 virtual launch event](https://about.gitlab.com/eighteen/) to learn about the future of AI-native software development.\n",[803,9,1133,695,717],{"slug":2340,"featured":91,"template":698},"gitlab-premium-with-duo","content:en-us:blog:gitlab-premium-with-duo.yml","Gitlab Premium With Duo","en-us/blog/gitlab-premium-with-duo.yml","en-us/blog/gitlab-premium-with-duo",{"_path":2346,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2347,"content":2352,"config":2357,"_id":2359,"_type":14,"title":2360,"_source":16,"_file":2361,"_stem":2362,"_extension":19},"/en-us/blog/gitlab-receives-ally-technology-partner-award-for-operational-excellence",{"title":2348,"description":2349,"ogTitle":2348,"ogDescription":2349,"noIndex":6,"ogImage":772,"ogUrl":2350,"ogSiteName":685,"ogType":686,"canonicalUrls":2350,"schema":2351},"GitLab receives Ally Technology Partner Award for Operational Excellence","Financial firm recognizes GitLab for its ability to deliver lean, automated, and streamlined business models that drive simplified and resilient solutions for Ally and its customers.","https://about.gitlab.com/blog/gitlab-receives-ally-technology-partner-award-for-operational-excellence","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab receives Ally Technology Partner Award for Operational Excellence\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Sandra Gittlen\"}],\n        \"datePublished\": \"2024-06-18\",\n      }",{"title":2348,"description":2349,"authors":2353,"heroImage":772,"date":2354,"body":2355,"category":1133,"tags":2356},[1295],"2024-06-18","Earlier this month, Ally Financial, a leading digital financial services company, awarded GitLab its Ally Technology Partner Award for Operational Excellence, citing the fundamental role GitLab and the GitLab DevSecOps platform play for Ally and its 
customers.\n\n\"This award is meant to recognize partners who help us ensure the resiliency of our solutions and who are committed to not just providing us products but helping us to operationalize and maintain those products on a sustained basis,\" said Spencer Cremers, CIO of Enterprise Technology Operations at Ally Financial. \"GitLab is a critical toolset that is fundamental to our day-to-day operations.\" \n\nAlly began migrating to GitLab in recent years and now has a large number of applications that have fully adopted DevSecOps principles. GitLab enables Ally to carry out thousands of builds per day across all environments and deploy numerous builds into Production every week.\n\nGitLab was also lauded for helping support Ally's operational goals. \"GitLab also provides tremendous operational support when we need service or responses on a short-notice basis,\" Cremers said.\n\nHe added that GitLab is helping the company explore \"virtualized development environments for a more efficient and predictable space for developers to learn,\" as well as security tools to shift security left in the software development lifecycle.\n\nThis is the second year GitLab has won an [Ally Technology Partner Award](https://www.ally.com/tech/partnering-to-drive-transformation-2nd-annual-ally-technology-partner-awards/). In 2023, the first year these awards were given, the financial firm recognized GitLab for \"[Velocity with Quality](https://www.ally.com/tech/recognizing-delivery-in-ecosystem-ally-technology-partner-awards/)\" for excellent speed to market, responsiveness, and flexibility, allowing Ally to deliver value to customers quickly. 
\n\n> Learn [how Ally uses the GitLab DevSecOps Platform](https://about.gitlab.com/customers/ally/) to achieve some big wins, including a 55% increase in deployment velocity and $300k yearly cost savings.",[284,1133,9],{"slug":2358,"featured":6,"template":698},"gitlab-receives-ally-technology-partner-award-for-operational-excellence","content:en-us:blog:gitlab-receives-ally-technology-partner-award-for-operational-excellence.yml","Gitlab Receives Ally Technology Partner Award For Operational Excellence","en-us/blog/gitlab-receives-ally-technology-partner-award-for-operational-excellence.yml","en-us/blog/gitlab-receives-ally-technology-partner-award-for-operational-excellence",{"_path":2364,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2365,"content":2370,"config":2375,"_id":2377,"_type":14,"title":2378,"_source":16,"_file":2379,"_stem":2380,"_extension":19},"/en-us/blog/gitlab-rotating-omnibus-linux-package-signing-key",{"title":2366,"description":2367,"ogTitle":2366,"ogDescription":2367,"noIndex":6,"ogImage":772,"ogUrl":2368,"ogSiteName":685,"ogType":686,"canonicalUrls":2368,"schema":2369},"GitLab rotating Omnibus Linux package signing key","Learn who is impacted by the rotation of the GNU Privacy Guard (GPG) key and what you need to know.","https://about.gitlab.com/blog/gitlab-rotating-omnibus-linux-package-signing-key","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab rotating Omnibus Linux package signing key\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"GitLab\"}],\n        \"datePublished\": \"2025-04-16\",\n      }",{"title":2366,"description":2367,"authors":2371,"heroImage":772,"date":2372,"body":2373,"category":779,"tags":2374},[822],"2025-04-16","As part of our standard security practices, GitLab is rotating the GNU Privacy Guard (GPG) key used to sign all Omnibus Linux packages on April 16, 2025. 
This key ensures the integrity of our packages, verifying that they have not been tampered with after creation in our CI pipelines. This key is distinct from the repository metadata signing key used by package managers and the GPG signing key for the GitLab Runner. GitLab is revoking the existing key and will begin signing upcoming packages using a new key with fingerprint `98BF DB87 FCF1 0076 416C 1E0B AD99 7ACC 82DD 593D`.\n\n**What do I need to do?**\n\nIf you currently validate the GPG signatures of GitLab Omnibus packages, you will need to update your copy of the package signing key. Packages published before this article will remain signed with the previous key.\n\nThe package signing key is separate from the repository metadata signing key used by your operating system’s package managers (like `apt` or `yum`). Unless you are specifically verifying package signatures or have configured your package manager to verify the package signatures, no action is required to continue installing GitLab Omnibus packages.\n\n**Where can I find the new key?**\n\nThe new key can be downloaded from `packages.gitlab.com` using the URL:\n\n[https://packages.gitlab.com/gitlab/gitlab-ee/gpgkey/gitlab-gitlab-ee-CB947AD886C8E8FD.pub.gpg](https://packages.gitlab.com/gitlab/gitlab-ee/gpgkey/gitlab-gitlab-ee-CB947AD886C8E8FD.pub.gpg)\n\nPlease check the documentation for more information concerning [verification of the package signatures](https://docs.gitlab.com/omnibus/update/package_signatures#package-signatures).\n\n**What do I do if I still have problems?**\n\nPlease open an issue in the [omnibus-gitlab issue tracker](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/new?issue&issuable_template=Bug).",[9,717,693],{"slug":2376,"featured":6,"template":698},"gitlab-rotating-omnibus-linux-package-signing-key","content:en-us:blog:gitlab-rotating-omnibus-linux-package-signing-key.yml","Gitlab Rotating Omnibus Linux Package Signing 
Key","en-us/blog/gitlab-rotating-omnibus-linux-package-signing-key.yml","en-us/blog/gitlab-rotating-omnibus-linux-package-signing-key",{"_path":2382,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2383,"content":2389,"config":2396,"_id":2398,"_type":14,"title":2399,"_source":16,"_file":2400,"_stem":2401,"_extension":19},"/en-us/blog/gitlab-supports-banks-in-navigating-regulatory-challenges",{"title":2384,"description":2385,"ogTitle":2384,"ogDescription":2385,"noIndex":6,"ogImage":2386,"ogUrl":2387,"ogSiteName":685,"ogType":686,"canonicalUrls":2387,"schema":2388},"GitLab supports banks in navigating regulatory challenges","Learn the upcoming changes to key frameworks, how they impact organizations, and the DevSecOps platform features that can help address them.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749664874/Blog/Hero%20Images/AdobeStock_880918603.jpg","https://about.gitlab.com/blog/gitlab-supports-banks-in-navigating-regulatory-challenges","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab supports banks in navigating regulatory challenges\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"George Kichukov\"},{\"@type\":\"Person\",\"name\":\"Allie Holland\"}],\n        \"datePublished\": \"2025-01-09\",\n      }",{"title":2384,"description":2385,"authors":2390,"heroImage":2386,"date":2393,"body":2394,"category":693,"tags":2395},[2391,2392],"George Kichukov","Allie Holland","2025-01-09","The risk of cyber attacks in the banking industry has reached unprecedented levels. Studies by the [International Monetary Fund](https://www.imf.org/-/media/Files/Publications/GFSR/2024/April/English/ch3.ashx) reveal that the financial sector is particularly vulnerable to cyber threats, with nearly one-fifth of reported incidents in the past two decades targeting this industry alone. 
As these threats continue to escalate, they drive the need for a regulatory response, prompting the banking and financial services industry to prepare for significant changes. GitLab enables financial institutions to proactively tackle these challenges, supporting banks on their regulatory journey while ensuring the operational resilience needed to protect the sensitive data pervasive throughout the banking ecosystem.\n\n## Understanding the upcoming regulatory changes\n\nAcknowledging that the regulatory landscape frequently changes, this article will concentrate on key frameworks in the EU poised to shape the future of banking and financial services. These frameworks not only address current industry challenges but also set the foundation for the development of a more secure and resilient financial ecosystem.\n\nHere are several regulations that are demanding the attention of the financial services industry. \n\n### [European Cyber Resilience Act (CRA)](https://digital-strategy.ec.europa.eu/en/policies/cyber-resilience-act)  \n\nImplemented as of **January 2024,** with a grace period extending for two years, the CRA establishes a comprehensive framework to enhance cybersecurity standards for digital products and services within the EU. This regulation seeks to mitigate the risks of vulnerabilities in software and hardware by ensuring that security is integrated throughout the entire product lifecycle, promoting a proactive “shift left” approach to security. 
By embedding security measures from the design phase onward, the CRA aims to safeguard the digital economy and bolster consumer trust in digital services.\n\n### [Digital Operational Resilience Act (DORA)](https://www.eiopa.europa.eu/digital-operational-resilience-act-dora_en)\n\nTaking effect on **January 17, 2025**, the Digital Operations Resilience Act aims to ensure that financial institutions can withstand, respond to, and recover from all types of information and communication technology related disruptions and threats. The goal is to unify and strengthen the resilience of the financial sector across Europe. \n\n### [European Data Act](https://digital-strategy.ec.europa.eu/en/policies/data-act)  \n\nAnticipated to become applicable on **September 12, 2025**, this regulation seeks to provide clearer rules regarding data use and sharing for AI and the internet of things, or IoT, enhancing data access and fostering innovation in various sectors, including finance.\n\n## Implications for banks and financial institutions \n\nAs financial institutions adapt to these evolving regulatory frameworks, the implications are significant and far-reaching. For instance, PYMNTS reports [59% of bankers see their legacy systems as a major business challenge](https://www.pymnts.com/digital-first-banking/2024/three-quarters-of-banks-face-digital-banking-infrastructure-issues/). These challenges present obstacles in the delivery of modern services, while hindering their ability to both detect and respond to modern cyber threats. According to the [2024 IBM Data Breach Report](https://www.ibm.com/downloads/cas/1KZ3XE9D), the average cost of a data breach in the financial services sector is a staggering $6.08 million, with breaches taking an average of 258 days to identify and contain. Unfortunately for banks, the most common type of data stolen or compromised was customer personally identifiable information, or PII. 
This highlights the urgent need for organizations to modernize their security practices and infrastructure.\n\nHere are four ways to address this challenge.\n\n1. **Increase investment in technology:** Banks will need to significantly increase their investments in technology and infrastructure. This involves evaluating current systems and processes to ensure they align with the stringent requirements of CRA, DORA, the European Data Act, and other regulations.  \n\n2. **Heighten risk management practices:** A cultural shift will be necessary within organizations, as teams will need to prioritize risk management and resilience strategies. DORA, in particular, emphasizes not just compliance but the ability to anticipate and recover from disruptions.  \n\n3. **Enhance data governance:** Many of these new regulations will require banks to prepare for new approaches to data sharing and governance. Banks will have to rethink how data is collected, stored, and analyzed, with a strong focus on transparency, accountability, and collaboration across departments.  \n\n4. **Strengthen cybersecurity:** As cyber threats evolve, the importance of robust cybersecurity measures cannot be overstated. The CRA mandates that financial institutions implement comprehensive security protocols, requiring banks to prioritize cybersecurity investments at every phase of the software development lifecycle. \n\n## How GitLab can help   \nWith years of experience working with some of the [largest financial organizations in the world](https://about.gitlab.com/customers/all/?industry=financial-services), GitLab stands ready to support banks and other financial institutions in their compliance efforts. Our integrated suite of features empowers development teams to streamline their workflows, allowing them to concentrate on software development rather than becoming bogged down by the manual tracking and monitoring of evolving compliance regulations. 
\n\n**[GitLab Dedicated](https://about.gitlab.com/dedicated/)**, our fully isolated, single-tenant SaaS solution, is designed to meet the complex compliance and data residency requirements of highly regulated industries. Hosted and managed by GitLab, in your chosen cloud region, GitLab Dedicated ensures that sensitive data remains secure and compliant with local regulations. GitLab can help banks navigate these challenges effectively with:\n\n1. [Comprehensive application security and compliance features](https://about.gitlab.com/stages-devops-lifecycle/secure/)\n\n-  __Security scanning built into developer workflows:__ Many financial institutions still rely on disparate tools for security checks, which can lead to gaps in coverage and oversight. GitLab offers built-in security scanning tools that automatically identify vulnerabilities and provide remediation guidance throughout the application lifecycle. By embedding security checks into [CI/CD pipelines](https://about.gitlab.com/topics/ci-cd/cicd-pipeline/), banks can detect and resolve issues early in the development process, where they are less costly and less risky to fix, ensuring that they adhere to necessary security protocols. GitLab offers the following [security scanner types](https://docs.gitlab.com/ee/user/application_security/secure_your_application.html):\n\n      1. [Static Application Security Testing (SAST)](https://docs.gitlab.com/ee/user/application_security/sast/index.html) \n\n      2. [Dynamic Application Security Testing (DAST)](https://docs.gitlab.com/ee/user/application_security/dast/index.html)  \n      3. [Secret Detection](https://docs.gitlab.com/ee/user/application_security/secret_detection/index.html)\n\n      4. [Infrastructure as Code (IaC) Scanning](https://docs.gitlab.com/ee/user/application_security/iac_scanning/index.html)\n\n      5. [Dependency (+ License) Scanning](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/index.html)  \n      6. 
[Coverage-guided Fuzz Testing](https://docs.gitlab.com/ee/user/application_security/coverage_fuzzing/index.html)  \n\n      7. [Web API Fuzz Testing](https://docs.gitlab.com/ee/user/application_security/api_fuzzing/)  \n\n      8. [Container Scanning](https://docs.gitlab.com/ee/user/application_security/container_scanning/index.html)  \n\n      9. [API Security Scanning](https://docs.gitlab.com/ee/user/application_security/api_security/index.html)\n\n- __Compliance and enforceable policies:__ Our platform enables [separation of duties](https://about.gitlab.com/blog/ensuring-compliance/), by allowing security and compliance teams to manage security policies independently, allowing developers to focus purely on development. This approach supports the [principle of least privilege](https://about.gitlab.com/blog/the-ultimate-guide-to-least-privilege-access-with-gitlab/), where developers access only what they need. For multinational banks or financial institutions who operate globally, GitLab’s policies and compliance dashboards assist in meeting strict geographical and regulatory requirements. These tools help maintain consistent adherence to compliance regulations, giving organizations clear visibility into their security posture across regions, industries, and regulations.\n\n- __[Software supply chain security](https://about.gitlab.com/solutions/supply-chain/):__ GitLab ensures the security of the entire build, development, and deployment environment through a comprehensive approach to software supply chain security. Our [software composition analysis (SCA)](https://about.gitlab.com/blog/reduce-supply-chain-risk-with-smarter-vulnerability-prioritization/) provides deep insights into component versions, licenses, and known vulnerabilities in dependencies which can be proactively remediated to reduce enterprise risk. 
Sensitive issues can also be marked as confidential, so that only those who have sufficient permissions can access them.
So much so that [Forbes](https://www.aba.com/-/media/documents/industry-insights/2023-thoughtmachine-banking-at-a-crossroads-the-threat-of-legacy-infrastructure.pdf?rev=6ce18fa56f0547e5a8c8433b50aef931) found that 60% of banking leaders consider legacy infrastructure to be the major factor keeping them from unlocking incremental growth. To compensate, the industry has shifted toward giving developers more freedom, but often at the cost of maintaining high security standards.\n\u003Cbr>\u003C/br>\n  GitLab solves this challenge by [automating testing within CI/CD pipelines](https://about.gitlab.com/topics/devops/devops-test-automation/), enabling financial institutions to maintain both speed and security. Developers can configure pipelines to fit their workflows, while security and compliance teams retain control over policies, ensuring adherence to critical security measures. By automating testing processes, GitLab helps banks remain resilient and functional, reducing the likelihood of disruptions.\n\n3. [Enhanced data governance](https://about.gitlab.com/stages-devops-lifecycle/govern/)\n\n- __Data management and compliance:__ GitLab’s data management features enable organizations to securely handle sensitive information. With embedded [audit logs](https://docs.gitlab.com/ee/user/compliance/audit_events.html), banks can track data access and changes, ensuring transparency and accountability in their data practices. These logs can show actions such as who changed the permission level of a particular user for a project, and when.\n\n- __[Collaboration tools](https://about.gitlab.com/topics/gitops/gitops-gitlab-collaboration/):__ GitLab promotes collaboration among cross-departmental teams, facilitating communication between IT, compliance, and business units. This integrated approach is essential for effective data governance, allowing banks to align their data practices with organizational goals.\n\n4. 
[Efficient incident reporting and response](https://docs.gitlab.com/ee/operations/incident_management/)\n\n- __[Centralized incident management](https://handbook.gitlab.com/handbook/engineering/infrastructure/incident-management/):__ GitLab provides centralized project management capabilities for logging and tracking significant incidents. This allows teams to respond quickly and effectively, ensuring that incidents are managed in a timely manner.\n\n- __[Incident response guides](https://handbook.gitlab.com/handbook/security/security-operations/sirt/sec-incident-response/):__ With GitLab, organizations can develop and maintain incident response plans within the platform. By simulating potential incidents and testing response protocols, banks can ensure preparedness and resilience in the face of unexpected challenges.\n\n5. [Documentation and audit readiness](https://docs.gitlab.com/ee/administration/compliance.html)\n\n- __Continuous compliance documentation:__ Traditionally, banks have been locked into rigid 12-month audit cycles, preparing documentation to meet stringent regulations like the Bank Secrecy Act (BSA), Automated Clearing House (ACH) rules, and Anti-Money Laundering (AML) requirements. However, as the pace and complexity of threats grow, the financial industry is shifting from reactive, periodic audits to a proactive, [continuous compliance model](https://about.gitlab.com/blog/meet-regulatory-standards-with-gitlab/). With GitLab, teams know exactly where they stand at any given moment, leveraging real-time compliance data to their advantage. This continuous insight empowers teams to address issues as they arise, rather than waiting for an audit, creating a more agile and resilient compliance posture.\n\n- **Customizable reporting:** With GitLab’s customizable reporting features, organizations can generate detailed reports that showcase compliance violations based on severity levels, violation types, and merge request titles. 
These reports provide valuable insights for both internal stakeholders and external parties, ensuring transparency and accountability.\n\n## Connect with GitLab today\n\nAs banks and financial institutions embrace these regulatory changes, GitLab not only provides the technology necessary to ensure compliance, but also fosters a culture of continuous improvement. This proactive approach allows financial institutions to release software with confidence, knowing they have the systems in place to mitigate risks and respond quickly to incidents.\n\nGitLab’s commitment to supporting the financial sector through these transitions ensures that organizations are not only compliant but also resilient and prepared for the challenges ahead. Together, we can build a safer and more secure financial future. \n\n> **[Reach out](https://about.gitlab.com/solutions/finance/) to learn more about how we can help meet your regulatory challenges.**\n\n## Read more\n\n- [What the Digital Operational Resilience Act means for banks](https://about.gitlab.com/blog/what-the-digital-operational-resilience-act-means-for-banks/)\n- [Meet regulatory standards with GitLab security and compliance](https://about.gitlab.com/blog/meet-regulatory-standards-with-gitlab/)\n- [How to ensure separation of duties and enforce compliance with GitLab](https://about.gitlab.com/blog/ensuring-compliance/)",[570,951,9,693],{"slug":2397,"featured":6,"template":698},"gitlab-supports-banks-in-navigating-regulatory-challenges","content:en-us:blog:gitlab-supports-banks-in-navigating-regulatory-challenges.yml","Gitlab Supports Banks In Navigating Regulatory 
Challenges","en-us/blog/gitlab-supports-banks-in-navigating-regulatory-challenges.yml","en-us/blog/gitlab-supports-banks-in-navigating-regulatory-challenges",{"_path":2403,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2404,"content":2410,"config":2416,"_id":2418,"_type":14,"title":2419,"_source":16,"_file":2420,"_stem":2421,"_extension":19},"/en-us/blog/gitlab-trust-center-welcome-to-self-service-customer-assurance",{"title":2405,"description":2406,"ogTitle":2405,"ogDescription":2406,"noIndex":6,"ogImage":2407,"ogUrl":2408,"ogSiteName":685,"ogType":686,"canonicalUrls":2408,"schema":2409},"GitLab Trust Center: Welcome to self-service customer assurance","The single, unified trust center provides access to security and privacy collateral, streamlined questionnaire submissions, an interactive knowledge base, and GitLab updates.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099558/Blog/Hero%20Images/Blog/Hero%20Images/gitlabflatlogomap_gitlabflatlogomap.png_1750099558369.png","https://about.gitlab.com/blog/gitlab-trust-center-welcome-to-self-service-customer-assurance","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab Trust Center: Welcome to self-service customer assurance\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Joseph Longo\"}],\n        \"datePublished\": \"2024-01-09\",\n      }",{"title":2405,"description":2406,"authors":2411,"heroImage":2407,"date":2413,"body":2414,"category":693,"tags":2415},[2412],"Joseph Longo","2024-01-09","GitLab, the most comprehensive AI-powered DevSecOps platform, exhibits unity, simplicity, and efficiency. 
To effectively represent GitLab and support our customers, we created the [GitLab Trust Center](https://trust.gitlab.com), a centralized, interactive, information portal that exhibits the same characteristics.\n\nThe GitLab Trust Center is powered by [SafeBase](http://safebase.io/), which allows us to maintain a single, unified location for communicating our compliance and assurance credentials, hosting our security and privacy documentation for customer consumption, sharing important notices such as responses to third-party breaches, and hosting our internal knowledge base where customers can readily access the same answers we provide in questionnaire responses. The GitLab Trust Center includes a portal for both GitLab.com and GitLab Dedicated.\n\n## Creating the GitLab Trust Center\n\nWhen I joined GitLab in 2022, a single, unified trust center did not exist. We maintained two distinct pages, our \"Trust Center\" page, which primarily highlighted our compliance and assurance credentials, and our Customer Assurance Package (CAP) page, which allowed the community to preview the different CAPs we maintained, review the documents contained within each package, and download/request each package.\n\nWhile these pages allowed us to support our customers' requests, they could be difficult to find and were supported by manual processes to triage inbound requests for security documentation, questionnaires, and other miscellaneous security and privacy requests. 
For customers with questions that are not answered by the available collateral or knowledge base, the GitLab Trust Center offers an interface to upload questions directly.
](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099566/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750099566272.png)\n\n### Trust through transparency\n\nAt GitLab, we believe transparency is a critical component of building trust. As a [handbook-first company](https://handbook.gitlab.com/security), we publicize a wealth of information, including all of our [controlled documents](https://handbook.gitlab.com/handbook/security/controlled-document-procedure/) (policies, standards, etc.), many of our internal processes, and much more!\n\nTo fully support our transparent culture and to enable our customers to be as self-sufficient as possible, we have made our knowledge base publicly available through the GitLab Trust Center so all customers can self-serve answers to their questions at any time.\n\nThe knowledge base consists of questions and answers related to many different topics that are typically found in vendor review questionnaires. You can search and filter for the information you need to support your third-party risk management (TPRM) reviews and confirm that GitLab is a trusted partner that will enable your organization to unlock its full potential and develop secure code faster.\n\n## Learn more\n\nTo learn more about our 100% self-service customer assurance process and how you can streamline your TPRM review, visit the [GitLab Trust Center](https://trust.gitlab.com) and explore all of the great content and resources available to you.",[9,1075,693],{"slug":2417,"featured":91,"template":698},"gitlab-trust-center-welcome-to-self-service-customer-assurance","content:en-us:blog:gitlab-trust-center-welcome-to-self-service-customer-assurance.yml","Gitlab Trust Center Welcome To Self Service Customer 
Assurance","en-us/blog/gitlab-trust-center-welcome-to-self-service-customer-assurance.yml","en-us/blog/gitlab-trust-center-welcome-to-self-service-customer-assurance",{"_path":2423,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2424,"content":2430,"config":2435,"_id":2437,"_type":14,"title":2438,"_source":16,"_file":2439,"_stem":2440,"_extension":19},"/en-us/blog/gitlab-ultimates-total-economic-impact-483-roi-over-3-years",{"title":2425,"description":2426,"ogTitle":2425,"ogDescription":2426,"noIndex":6,"ogImage":2427,"ogUrl":2428,"ogSiteName":685,"ogType":686,"canonicalUrls":2428,"schema":2429},"GitLab Ultimate's total economic impact: 483% ROI over 3 years","A Forrester Consulting study of GitLab Ultimate finds that the DevSecOps platform enhanced security posture with 5x time saved on security-related activities.\n","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098354/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%281%29_5XrohmuWBNuqL89BxVUzWm_1750098354056.png","https://about.gitlab.com/blog/gitlab-ultimates-total-economic-impact-483-roi-over-3-years","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab Ultimate's total economic impact: 483% ROI over 3 years\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Dave Steer\"}],\n        \"datePublished\": \"2024-11-13\",\n      }",{"title":2425,"description":2426,"authors":2431,"heroImage":2427,"date":2432,"body":2433,"category":1133,"tags":2434},[2122],"2024-11-13","A powerful DevSecOps platform streamlines operations, prevents security vulnerabilities from disrupting (and costing) your business, increases productivity, and fosters a culture of innovation and collaboration. That's exactly what we built GitLab to do, and our Ultimate tier represents the full power of our platform. 
To see the real-world results, we commissioned Forrester Consulting to create a “Total Economic Impact™ of GitLab Ultimate” study. Here’s what we discovered at a glance. \n\nAccording to the study, for a composite organization based on interviewed customers, GitLab delivered:  \n\n* **Three-year ROI of 483%**  \n* **400% improvement in developer productivity**  \n* **15x faster time to first release\u003Csup>1\u003C/sup>**  \n* **5x time saved on security-related activities**\n\n**Overall, GitLab enables 50% more work with business value.** \n\nThe numbers tell a clear story: GitLab's platform transforms how teams work together. Whether you’re an application security lead tasked with improving the company’s security posture, a developer looking to deliver high-quality code faster, or a CTO looking for a scalable, secure, and flexible DevSecOps platform, this study (see full methodology below) shows that GitLab Ultimate delivers. Let’s break down the results.  \n\n> Download the full [2024 Forrester Consulting “Total Economic Impact of GitLab Ultimate” study](https://about.gitlab.com/resources/study-forrester-tei-gitlab-ultimate/).\n\n## **1\\. Three-year ROI of 483%**\n\n*“The big win for us was efficiency — both in administration and in overall operations. Now, everyone can work collaboratively, and we can easily automate our pipeline. I’m also able to move personnel around to complete different tasks more efficiently. Rather than needing to train on different tools across programs, now it’s just ‘learn GitLab,’ and they’re ready to begin working.”* - CTO and Senior Vice President, Defense industry\n\nThe study found that teams started seeing payback within six months of implementing GitLab Ultimate, primarily through improved efficiency. With a **483% ROI over three years**, organizations reduced their software toolchain costs by 25% and cut the time IT teams spent on administering complex toolchains by 75%. 
Beyond the cost savings, moving to a unified platform fundamentally improves how teams develop and deliver software.\n\n## **2\\. 400% improvement in productivity**\n\n*“When I have conversations about GitLab with our developers, they universally agree that it has increased productivity at our organization across teams and roles. We now have one platform that has functions that everyone can use.”* - Software architect, Energy/Research industry\n\nDevelopers thrive in environments where they can easily switch between tasks without losing momentum. According to the study, developers can reclaim up to 305 hours per year by using [testing automation](https://about.gitlab.com/topics/devops/devops-test-automation/) within GitLab to help them test more frequently and track and fix bugs faster, all within a single interface with no context switching. This streamlined workflow allows them to focus on coding rather than juggling multiple tools and processes.\n\nThe productivity gains extend to onboarding, too: new hires in the composite organization’s software development team ramped up to full productivity 75% faster (i.e., in 1.5 weeks instead of 1.5 months). The impact is clear: Everyone on the team can contribute meaningful work sooner. \n\n## **3\\. 15x faster time to first release**\n\n*“Our superpower is software. It’s measured in terms of velocity and the ability to get new capabilities into the hands of our customers. For that to remain our primary focus, it just made economic sense to \\[consolidate\\] onto a single platform.”* - CTO and Senior Vice President, Defense industry\n\nThe summary data from the customer interviews reveals that GitLab enables organizations to accelerate first production release by 15 times. This boost is achieved through faster project initiation, more frequent software releases, and a proactive approach to security that natively integrates security scans into the development process from the outset. 
Even with this increase in velocity, software quality and security remain at the same high levels, thanks to developers' ability to fix issues early and quickly.
\n\n# **Experience DevSecOps in action**\n\nWith a 483% ROI, a rapid payback period, and countless success stories, GitLab is an invaluable tool for enterprises looking to transform their software development processes.\n\n> To explore how GitLab can benefit your organization, download the full [Forrester Consulting “Total Economic Impact of GitLab Ultimate” study today](https://about.gitlab.com/resources/study-forrester-tei-gitlab-ultimate/).\n\n**Methodology**  \n*For the study, Forrester interviewed four GitLab Ultimate customers across industries, including finance, defense, and research, and created a composite organization to represent the aggregated results of these interviews. The composite organization is expected to adopt GitLab Ultimate across all teams in a three-year period.*\n\n*The composite organization is a $5 billion company with 5,000 employees, with 40% involved in software delivery and 50% of annual revenue driven by software development. Their goals are to consolidate multiple tools into a single, integrated platform, enhance developer productivity, ensure compliance with industry regulations and internal policies, and strengthen security throughout the development lifecycle.*\n\n*1. 
Based on summary data from customer interviews; not applicable to the composite organization results.*",[9,2239,1133,693],{"slug":2436,"featured":91,"template":698},"gitlab-ultimates-total-economic-impact-483-roi-over-3-years","content:en-us:blog:gitlab-ultimates-total-economic-impact-483-roi-over-3-years.yml","Gitlab Ultimates Total Economic Impact 483 Roi Over 3 Years","en-us/blog/gitlab-ultimates-total-economic-impact-483-roi-over-3-years.yml","en-us/blog/gitlab-ultimates-total-economic-impact-483-roi-over-3-years",{"_path":2442,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2443,"content":2448,"config":2455,"_id":2457,"_type":14,"title":2458,"_source":16,"_file":2459,"_stem":2460,"_extension":19},"/en-us/blog/gitlab-webhooks-get-smarter-with-self-healing-capabilities",{"title":2444,"description":2445,"ogTitle":2444,"ogDescription":2445,"noIndex":6,"ogImage":708,"ogUrl":2446,"ogSiteName":685,"ogType":686,"canonicalUrls":2446,"schema":2447},"GitLab Webhooks get smarter with self-healing capabilities","Introducing changes to webhook self-healing behavior, which reduce manual intervention and improve reliability. Discover the impact on your integrations and how to prepare.","https://about.gitlab.com/blog/gitlab-webhooks-get-smarter-with-self-healing-capabilities","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"GitLab Webhooks get smarter with self-healing capabilities\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Magdalena Frankiewicz\"}],\n        \"datePublished\": \"2024-11-14\",\n      }",{"title":2444,"description":2445,"authors":2449,"heroImage":708,"date":2450,"body":2451,"category":779,"tags":2452,"updatedDate":2454},[1639],"2024-11-14","We're excited to announce upcoming changes to how GitLab handles webhooks, aimed at improving reliability and reducing manual intervention. These changes will affect GitLab.com users in the coming weeks. 
* **Reduced manual intervention:** You'll no longer need to manually re-enable webhooks that have been disabled due to temporary issues.
Update your monitoring:** If you rely on webhook status for monitoring, update your processes to account for the new behavior where webhooks may self-heal.\n\n**3. Test your integrations:** Once the change is rolled out, test your integrations to ensure they behave as expected with the new webhook handling.\n\n## Timeline and rollout\n\nThis feature is expected to be rolled out in GitLab 17.11.\n- For GitLab.com users, the changes will be applied automatically.\n- For Self-Managed and Dedicated users, the changes will only affect instances that have the auto_disabling_webhooks ops flag enabled.\n\n## Feedback and support\n\nWe value your feedback! If you encounter any issues or have suggestions regarding this change, please comment on our [webhook feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/503733).\n\nFor any questions or concerns, please reach out to [GitLab Support](https://about.gitlab.com/support/) or consult our [webhooks documentation](https://docs.gitlab.com/ee/user/project/integrations/webhooks.html).\n\nStay tuned for more updates, and thank you for being a part of the GitLab community!",[269,2453,9,695],"contributors","2025-03-24",{"slug":2456,"featured":6,"template":698},"gitlab-webhooks-get-smarter-with-self-healing-capabilities","content:en-us:blog:gitlab-webhooks-get-smarter-with-self-healing-capabilities.yml","Gitlab Webhooks Get Smarter With Self Healing 
Capabilities","en-us/blog/gitlab-webhooks-get-smarter-with-self-healing-capabilities.yml","en-us/blog/gitlab-webhooks-get-smarter-with-self-healing-capabilities",{"_path":2462,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2463,"content":2469,"config":2474,"_id":2476,"_type":14,"title":2477,"_source":16,"_file":2478,"_stem":2479,"_extension":19},"/en-us/blog/guide-to-fulfilling-soc-2-security-requirements-with-gitlab",{"title":2464,"description":2465,"ogTitle":2464,"ogDescription":2465,"noIndex":6,"ogImage":2466,"ogUrl":2467,"ogSiteName":685,"ogType":686,"canonicalUrls":2467,"schema":2468},"Guide to fulfilling SOC 2 security requirements with GitLab","Understand the application security features in the GitLab DevSecOps platform that map to System and Organization Controls 2 requirements.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099576/Blog/Hero%20Images/Blog/Hero%20Images/AdobeStock_1172300481_IGPi3TS4VzFgcqhvEdBlR_1750099575518.jpg","https://about.gitlab.com/blog/guide-to-fulfilling-soc-2-security-requirements-with-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Guide to fulfilling SOC 2 security requirements with GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Fernando Diaz\"}],\n        \"datePublished\": \"2025-01-22\",\n      }",{"title":2464,"description":2465,"authors":2470,"heroImage":2466,"date":2471,"body":2472,"category":693,"tags":2473},[1599],"2025-01-22","For businesses that handle sensitive customer information, achieving SOC 2\n(System and Organization Controls 2) compliance is not just a good practice\n— it's often a necessity. 
SOC 2 is a rigorous auditing standard developed by\nthe American Institute of Certified Public Accountants that assesses a\nservice organization's controls related to security, availability,\nprocessing integrity, confidentiality, and privacy.\n\n\nWhile SOC 2 is not legally mandated, it has become increasingly important,\nin part due to breaches consistently seen in news headlines. Obtaining SOC 2\ncompliance allows customers to build trust with service organizations\nbecause they know their data is being properly stored and security controls\nhave been assessed by a third party.\n\n\nIn this guide, we'll review the requirements for obtaining SOC 2 compliance\nand how GitLab can help your organization meet the highest standards for\napplication security.\n\n\n## What requirements are set by SOC 2\n\n\nThe compliance process involves an audit by an independent auditor who\nevaluates the design and operating effectiveness of an organization's\ncontrols. This process can be very costly, and many organizations are not\nsufficiently prepared before an audit. 
With the SOC 2 audit process\ntypically taking close to a year, it is important to establish an efficient\npre-audit process.\n\n\nTo obtain SOC 2 compliance, an organization must meet requirements based on\nthe Trust Services Criteria:\n\n\n| Criteria | Requirements |\n\n| :---- | :---- |\n\n| Security | - Implement controls to protect against unauthorized access\n\u003Cbr> - Establish procedures for identifying and mitigating risks\u003Cbr> - Set\nup systems for detecting and addressing security incidents |\n\n| Availability | - Ensure systems are accessible for operation as agreed\u003Cbr>\n- Monitor current usage and capacity \u003Cbr> - Identify and address\nenvironmental threats that could affect system availability |\n\n| Process integrity | - Maintain accurate records of system inputs and\noutputs \u003Cbr> - Implement procedures to quickly identify and correct system\nerrors \u003Cbr> - Define processing activities to ensure products and services\nmeet specifications |\n\n| Confidentiality | - Identify and protect confidential information \u003Cbr> -\nEstablish policies for data retention periods \u003Cbr> - Implement secure\nmethods for destroying confidential data after retention periods expire |\n\n| Privacy | - Obtain consent before collecting sensitive personal\ninformation \u003Cbr> - Communicate privacy policies clearly and in plain\nlanguage \u003Cbr> - Collect data only through legal means and from reliable\nsources |\n\n\u003Cbr>\n\n\nNote that these requirements are not one-time achievements, but rather a\ncontinuous process. 
Auditors will require control effectiveness over time.\n\n\n## How to achieve and maintain the security requirements\n\n\nGitLab provides several features off the board to get you started with\nassuring SOC 2 security needs are met:\n\n\n| Security Requirement | Addressing Feature |\n\n| :---- | :---- |\n\n| Implement controls to protect against unauthorized access | - Confidential\nIssues and Merge Requests \u003Cbr> - Custom Roles and Granular Permissions \u003Cbr>\n- Security Policies \u003Cbr> - Verified Commit \u003Cbr> - Signed Container Images\n\u003Cbr> - CodeOwners \u003Cbr> - Protected Branches |\n\n| Set up systems for detecting and addressing security incidents | -\nVulnerability Scanning \u003Cbr> - Merge Request Security Widget \u003Cbr> -\nVulnerability Insights Compliance Center \u003Cbr> - Audit Events \u003Cbr> -\nVulnerability Report Dependency List \u003Cbr> - AI: Vulnerability Explanation\n\u003Cbr> - AI: Vulnerability Resolution |\n\n| Establish procedures for identifying and mitigating risks | All the above\ntools can be used by a security team to establish a procedure around what to\ndo when security vulnerabilities are identified and how they are mitigated.\n|\n\n\u003Cbr>\n\nLet’s go through each section and highlight the security features that\naddress these requirements. Note that a [GitLab Ultimate\nsubscription](https://about.gitlab.com/free-trial/) and the correct Role and\nPermissions are required to access many of the features listed. Be sure to\ncheck out the appropriate documentation for more information.\n\n\n## Implement controls to protect against unauthorized access\n\n\nImplementing robust access controls is essential for protecting an\norganization's assets, ensuring regulatory compliance, maintaining\noperational continuity, and fostering trust. 
GitLab allows you to implement\ncontrols to follow the [principle of least\nprivilege](https://about.gitlab.com/blog/the-ultimate-guide-to-least-privilege-access-with-gitlab/),\nsecuring against unauthorized access. I will briefly cover:\n\n\n* [Security policies](#security-policies)  \n\n* [Custom roles and granular\npermissions](#custom-roles-and-granular-permissions)  \n\n* [Branch protections and CodeOwners](#branch-protections-and-codeowners)  \n\n* [Verified commits](#verified-commits)\n\n\n### Security policies\n\n\nGitLab's security policies, known as guardrails, enable security and\ncompliance teams to implement consistent controls across their organization,\nhelping prevent security incidents, maintain compliance standards, and\nreduce risk by automatically enforcing security best practices at scale.\n\n\n![Merge request approval policy in\naction](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099597/Blog/Content%20Images/Blog/Content%20Images/merge_request_approval_policy_aHR0cHM6_1750099596925.png)\n\n\n\u003Ccenter>\u003Ci>Merge request approval policy in action\u003C/i>\u003C/center>\u003Cbr>\n\n\nThe following policy types are available:\n\n\n* Scan execution policy: Enforce security scans, either as part of the\npipeline or on a specified schedule  \n\n* Merge request approval policy: Enforce project-level settings and approval\nrules based on scan results  \n\n* Pipeline execution policy: Enforce CI/CD jobs as part of project\npipelines  \n\n* Vulnerability management policy: Automate vulnerability management\nworkflows\n\n\nHere is an example of ensuring compliance with the pipeline execution\npolicy:\n\n\n1. Create a project that houses multiple compliance jobs. An example of a\njob can be to check permissions of files that are deployed. These jobs\nshould be generic enough that they can be applied to multiple applications.\n\n2. 
Limit the project's permissions to only security/compliance officers;\ndon’t allow developers to remove jobs. This allows for separation of duties.\n\n3. Inject the compliance jobs in batch to the projects where they are\nrequired. Force them to run no matter what, but allow approval from team\nlead to not block development. This will ensure compliance jobs are always\nrun and cannot be removed by developers, and that your environment remains\ncompliant.\n\n\n> ##### Learn how to create security policies with our [security policy\ndocumentation](https://docs.gitlab.com/ee/user/application_security/policies/).\n\n\n### Custom roles and granular permissions\n\n\nCustom permissions in GitLab allow organizations to create fine-grained\naccess controls beyond the standard role-based permissions, providing\nbenefits such as:\n\n\n* more precise access control  \n\n* better security compliance  \n\n* reduced risk of accidental access  \n\n* streamlined user management  \n\n* support for complex organizational structures\n\n\n![GitLab custom\nroles](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099597/Blog/Content%20Images/Blog/Content%20Images/custom_roles_aHR0cHM6_1750099596926.png)\n\n\n\u003Ccenter>\u003Ci>Roles and permissions settings, including custom\nroles\u003C/i>\u003C/center>\n\n\n> ##### Learn how to create custom roles with granular permissions using our\n[custom role\ndocumentation](https://docs.gitlab.com/ee/user/custom_roles.html).\n\n\n### Branch protections and CodeOwners\n\n\nGitLab helps you further control who can change your code using two key\nfeatures:\n\n* Branch Protection, which lets you set rules about who can update specific\nbranches – like requiring approval before merging changes.\n\n* Code Ownership, which automatically finds the right people to review code\nchanges by matching files to their designated owners.\n\n\nTogether, these features help keep your code secure and high-quality by\nmaking sure the right people review 
and approve changes.\n\n\n![Protected\nbranches](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099597/Blog/Content%20Images/Blog/Content%20Images/protected_branches_aHR0cHM6_1750099596928.png)\n\n\n\u003Ccenter>\u003Ci>Protected branch settings\u003C/i>\u003C/center>\n\n\n> ##### Learn how to create protected branches along with CodeOwners using\n[protected\nbranch](https://docs.gitlab.com/ee/user/project/repository/branches/protected.html)\nand [codeowner](https://docs.gitlab.com/ee/user/project/codeowners/)\ndocumentation.\n\n\n### Verified commits\n\n\nWhen you sign your commits digitally, you prove they really came from you,\nnot someone pretending to be you. Think of a digital signature like a unique\nstamp that only you can create. When you upload your public GPG key to\nGitLab, it can check this stamp. If the stamp matches, GitLab marks your\ncommit as `Verified`. You can then set up rules to reject commits that\naren't signed, or block all commits from users who haven't verified their\nidentity.\n\n\n![Commit signed with verified\nsignature](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099597/Blog/Content%20Images/Blog/Content%20Images/signed_commit_aHR0cHM6_1750099596929.png)\n\n\n\u003Ccenter>\u003Ci>Commit signed with verified signature\u003C/i>\u003C/center>\u003Cbr>\n\n\nCommits can be signed with:\n\n\n* SSH key  \n\n* GPG key  \n\n* Personal x.509 certificate\n\n\n> ##### Learn more about verified commits with our [signed commits\ndocumentation](https://docs.gitlab.com/ee/user/project/repository/signed_commits/).\n\n\n## Set up systems for detecting and addressing security incidents\n\n\nSetting up systems for detecting and addressing security incidents is vital\nfor maintaining a robust security posture, ensuring regulatory compliance,\nminimizing potential damages, and enabling organizations to respond\neffectively to the ever-evolving threat landscape.\n\n\nGitLab provides security scanning and vulnerability 
management for the\ncomplete application lifecycle. I will briefly cover:\n\n\n* [Security scanning and vulnerability\nmanagement](#security-scanning-and-vulnerability-management)  \n\n* [Software bill of materials](#software-bill-of-materials)  \n\n* [System auditing and security posture\nreview](#system-auditing-and-security-posture-review)\n\n* [Compliance and security posture\noversight](#compliance-and-security-posture-oversight)\n\n\n### Security scanning and vulnerability management\n\n\nGitLab provides a variety of different security scanners that cover the\ncomplete lifecycle of your application:\n\n\n* Static Application Security Testing (SAST)  \n\n* Dynamic Application Security Testing (DAST)\n\n* Container Scanning  \n\n* Dependency Scanning  \n\n* Infrastructure as Code (IaC) Scanning  \n\n* Coverage-guided Fuzzing\n\n* Web API Fuzzing\n\n\nThese scanners can be added to your pipeline via the use of templates. For\nexample, to run SAST and dependency scanning jobs in the test stage, simply\nadd the following to your .gitlab-ci.yml:\n\n\n```yaml  \n\nstages:  \n   - test\n\ninclude:  \n  - template: Jobs/Dependency-Scanning.gitlab-ci.yml  \n  - template: Jobs/SAST.gitlab-ci.yml  \n``` \n\n\nThese jobs are fully configurable via environment variables and using GitLab\njob syntax. Once a pipeline kicks off, the security scanners run and detect\nvulnerabilities in the diff between the current branch and the target\nbranch. The vulnerability can be seen in a merge request (MR), providing\ndetailed oversight before the code is merged to the target branch. 
The MR\nwill provide the following information on a vulnerability:\n\n\n* description  \n\n* status  \n\n* severity  \n\n* evidence  \n\n* identifiers  \n\n* URL (if applicable)  \n\n* request/response (if applicable)  \n\n* reproduction assets (if applicable)  \n\n* training (if applicable)  \n\n* code flow (if using advanced SAST)\n\n\n![MR view of introduced\nvulnerability](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099597/Blog/Content%20Images/Blog/Content%20Images/no_sql_injection_vulnerability_mr_view_aHR0cHM6_1750099596931.png)\n\n\n\u003Ccenter>\u003Ci>MR view of introduced vulnerability\u003C/i>\u003C/center>\u003Cbr>\n\n\nDevelopers can use this data to remediate vulnerabilities without slowing\ndown security team workflows. Developers can dismiss a vulnerability with\nreasoning, speeding up the review process, or they can create a confidential\nissue to track the vulnerability.\n\n\nIf the code in an MR is merged to the default (usually production-level)\nbranch, then the vulnerability report is populated with the security scanner\nresults. These results can be used by security teams to manage and triage\nthe vulnerabilities found in production.\n\n\n![Vulnerability report with Batch Status\nsetting](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099597/Blog/Content%20Images/Blog/Content%20Images/vulnerability_report_aHR0cHM6_1750099596936.png)\n\n\n\u003Ccenter>\u003Ci>Vulnerability report with Batch Status setting\u003C/i>\u003C/center>\u003Cbr>\n\n\nWhen clicking on a vulnerability description within the vulnerability\nreport, you are provided with the vulnerability page, which contains the\nsame vulnerability data as the MR, allowing for a single source of truth\nwhen assessing impact and performing remediation. 
From the vulnerability\npage, [GitLab Duo](https://about.gitlab.com/gitlab-duo/) AI features can be\nused to explain the vulnerability and also create an MR to remediate,\nspeeding up resolution time.\n\n\n> ##### Learn more about the security scanners included with GitLab and how\nto manage vulnerabilities in our [application security\ndocumentation](https://docs.gitlab.com/ee/user/application_security/).\n\n\n### Software bill of materials\n\n\nGitLab can create a detailed list of everything your software uses – kind of\nlike an ingredients list for your code. This list, called a software bill of\nmaterials\n([SBOM](https://about.gitlab.com/blog/the-ultimate-guide-to-sboms/)),\nshows you all the external code your project depends on, including the parts\nyou directly use and their own dependencies. For each item, you can see\nwhich version you're using, what license it has, and whether it has any\nknown security problems. This helps you keep track of what's in your\nsoftware and spot potential risks.\n\n\n![Group-level dependency list\n(SBOM)](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099597/Blog/Content%20Images/Blog/Content%20Images/sbom_aHR0cHM6_1750099596937.png)\n\n\n\u003Ccenter>\u003Ci>Group-level dependency list (SBOM)\u003C/i>\u003C/center>\n\n\n> ##### Learn how to access and use the dependency list with our [dependency\nlist\ndocumentation](https://docs.gitlab.com/ee/user/application_security/dependency_list/).\n\n\n### System auditing and security posture review\n\n\nGitLab keeps track of everything that happens in your system such as who\nmade changes, what they changed, and when they did it. Think of it like a\nsecurity camera for your code. 
This record helps you:\n\n\n* spot any suspicious activity  \n\n* show regulators you're following the rules  \n\n* figure out what happened if something goes wrong  \n\n* see how people are using GitLab\n\n\nAll of this information is stored in one place, making it easy to review and\ninvestigate when needed. For example, you can use audit events to track:\n\n\n* who changed the permission level of a particular user for a GitLab\nproject, and when  \n\n* who added a new user or removed a user, and when\n\n\n![Project-level audit\nevents](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099597/Blog/Content%20Images/Blog/Content%20Images/audit_events_aHR0cHM6_1750099596938.png)\n\n\n\u003Ccenter>\u003Ci>Project-level audit events\u003C/i>\u003C/center>\n\n\n> ##### Learn more about audit events, see the [audit events\ndocumentation](https://docs.gitlab.com/ee/user/compliance/audit_events.html).\n\n\n## Compliance and security posture oversight\n\n\nGitLab's Security Dashboard works like a control room that shows you all\nyour security risks in one place. Instead of checking different security\ntools separately, you can see all their findings together on one screen.\nThis makes it easy to spot and fix security problems across all your\nprojects.\n\n\n![Group-level Security\nDashboard](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099597/Blog/Content%20Images/Blog/Content%20Images/security_dashboard_aHR0cHM6_1750099596939.png)\n\n\u003Ccenter>\u003Ci>Group-level security dashboard\u003C/i>\u003C/center>\n\n\n> ##### Learn more about security dashboards with our [security dashboard\ndocumentation](https://docs.gitlab.com/ee/user/application_security/security_dashboard/).\n\n\n## Establish procedures for identifying and mitigating risks\n\n\nVulnerabilities go through a specific lifecycle. For example, a part of the\nprocedure can be to require approval for any vulnerable code to be merged to\nprotected branches using security policies. 
Then the procedure can state\nthat vulnerable code detected in production must be prioritized, assessed,\nremediated, and then validated: \n\n\n* The criteria for prioritization can be by the severity of the\nvulnerability provided by GitLab scanners.  \n\n* The assessment can be done using exploitation details provided by the AI:\nVulnerability Explanation.  \n\n* Once the vulnerability is remediated, then it can be validated using\nbuilt-in GitLab regression tests and scanners.\n\n\nWhile every organization's needs are different, leveraging GitLab as a\nplatform, risks can be quickly identified and addressed with reduced risk\nwhen compared to using a sprawl of disparate tools.\n\n\n### Best practices for SOC 2 compliance\n\n\n* Establish a strong security culture: Foster a culture of security\nawareness and accountability throughout your organization.  \n\n* Document everything: Maintain thorough documentation of policies,\nprocedures, and controls.  \n\n* Automate where possible: Use automation tools to streamline compliance\nprocesses and reduce errors.  \n\n* Communicate effectively: Keep stakeholders informed about your compliance\nefforts.  \n\n* Seek expert guidance: Consider partnering with a qualified consultant to\nassist with your SOC 2 journey.\n\n\nAchieving SOC 2 compliance is a significant undertaking, but the benefits\nare undeniable. 
By demonstrating your commitment to application security and\noperational excellence, you can build trust with customers, enhance your\nreputation, and gain a competitive edge in the marketplace.\n\n\n## Read more\n\n\nTo learn more about GitLab and how we can help achieve SOCv2 compliance\nwhile enhancing your security posture, check out the following resources:\n\n\n* [GitLab Ultimate](https://about.gitlab.com/pricing/ultimate/)  \n\n* [GitLab Security and Compliance\nSolutions](https://about.gitlab.com/solutions/security-compliance/)  \n\n* [GitLab Application Security\nDocumentation](https://docs.gitlab.com/ee/user/application_security/)  \n\n* [GitLab DevSecOps Tutorial\nProject](https://gitlab.com/gitlab-da/tutorials/security-and-governance/devsecops/simply-vulnerable-notes)\n",[719,693,9,695,717],{"slug":2475,"featured":91,"template":698},"guide-to-fulfilling-soc-2-security-requirements-with-gitlab","content:en-us:blog:guide-to-fulfilling-soc-2-security-requirements-with-gitlab.yml","Guide To Fulfilling Soc 2 Security Requirements With Gitlab","en-us/blog/guide-to-fulfilling-soc-2-security-requirements-with-gitlab.yml","en-us/blog/guide-to-fulfilling-soc-2-security-requirements-with-gitlab",{"_path":2481,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2482,"content":2487,"config":2494,"_id":2496,"_type":14,"title":2497,"_source":16,"_file":2498,"_stem":2499,"_extension":19},"/en-us/blog/happy-birthday-secure-by-design",{"title":2483,"description":2484,"ogTitle":2483,"ogDescription":2484,"noIndex":6,"ogImage":2311,"ogUrl":2485,"ogSiteName":685,"ogType":686,"canonicalUrls":2485,"schema":2486},"Happy birthday, Secure by Design!","The U.S. government's initiative to ensure greater security in software products turns one. 
Find out what GitLab has done to align with this critical effort.","https://about.gitlab.com/blog/happy-birthday-secure-by-design","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Happy birthday, Secure by Design!\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Joel Krooswyk\"}],\n        \"datePublished\": \"2024-04-30\",\n      }",{"title":2483,"description":2484,"authors":2488,"heroImage":2311,"date":2490,"body":2491,"category":693,"tags":2492},[2489],"Joel Krooswyk","2024-04-30","When the Cybersecurity and Infrastructure Security Agency (CISA) first published its [Secure by Design](https://www.cisa.gov/securebydesign) software protection initiative on April 13, 2023, the industry paid close attention. The initiative urges all software manufacturers to take the steps necessary to ensure that the products they ship are, in fact, secure by design. At GitLab, we quickly assessed our alignment with the initiative and over the past year have continued to innovate in accordance with CISA's guidelines.\n\nCISA's Secure by Design introduced three software security principles:\n\n1. Take ownership of customer security outcomes.\n\n2. Embrace radical transparency and accountability.\n\n3. Build organizational structure and leadership to achieve these goals.\n\n## A year of government guidance  \n\nThe U.S. government has produced significant guidance throughout the past year that reflects the Secure by Design theme. 
Here are just a few highlights:\n\n* August 2023: ONCD in partnership with several other agencies kicked off the [OS3i Initiative](https://www.whitehouse.gov/oncd/briefing-room/2023/08/10/fact-sheet-office-of-the-national-cyber-director-requests-public-comment-on-open-source-software-security-and-memory-safe-programming-languages/) to prioritize focus areas related to open source software security.\n* August 2023: NIST produced [SP 800-204D ](https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-204D.pdf)to provide practical software supply chain security strategies for DevSecOps CI/CD pipelines.\n* October 2023: CISA released a second iteration of the [Secure by Design](https://www.cisa.gov/sites/default/files/2023-10/SecureByDesign_1025_508c.pdf) document.\n* October 2023: The [AI Executive Order](https://www.whitehouse.gov/briefing-room/statements-releases/2023/10/30/fact-sheet-president-biden-issues-executive-order-on-safe-secure-and-trustworthy-artificial-intelligence/) was issued by the Biden Administration. Since then, AI engagement guidelines have been produced by most government agencies.\n* December 2023: CISA produced [Memory Safe Roadmap guidance](https://www.cisa.gov/sites/default/files/2023-12/The-Case-for-Memory-Safe-Roadmaps-508c.pdf).\n* February 2024: NIST released the [CyberSecurity Framework 2.0](https://www.nist.gov/news-events/news/2024/02/nist-releases-version-20-landmark-cybersecurity-framework).\n* March 2024: CISA and OMB published the [Secure Software Development Attestation Form](https://www.cisa.gov/secure-software-attestation-form) and opened a [repository](https://www.cisa.gov/news-events/news/cisa-publishes-repository-software-attestation-and-artifacts) for collection of the attestations.\n\n## How GitLab has evolved with the Secure by Design initiative\n\nGitLab has also continued to grow in alignment with the Secure by Design initiative over the past year. 
Here are some examples.\n\n### GitLab signed the Secure by Design Pledge\n\nGitLab is proud to have signed the CISA [Secure by Design Pledge](https://www.cisa.gov/securebydesign/pledge).\n\n\"The Secure by Design concepts are well-aligned with GitLab's core values. As the most comprehensive AI-powered DevSecOps platform, GitLab offers its unwavering support towards CISA’s efforts to instill a Secure by Design mindset in software manufacturers. GitLab is proud to make the Secure by Design Pledge, and we firmly believe these efforts will help us enable everyone to innovate and succeed on a safe, secure, and trusted DevSecOps platform,\" said GitLab Chief Information Security Officer Josh Lemos.\n\n### \"Secure by default\" practices\n\nConfiguring and securing installations and users can be a challenge. GitLab developed granular user access with [custom user roles](https://docs.gitlab.com/ee/user/custom_roles.html) and [customizable permissions](https://docs.gitlab.com/ee/user/custom_roles/abilities.html). Management of [tokens](https://docs.gitlab.com/ee/security/token_overview.html), [API service accounts](https://docs.gitlab.com/ee/user/profile/service_accounts.html), and [credentials](https://docs.gitlab.com/ee/administration/credentials_inventory.html) have been in focus with continuous improvements and more rigorous authentication security capabilities throughout the year. 
\n\n### Secure software development practices\n\nWith every release, GitLab has incrementally enhanced scanning accuracy, coverage, and capabilities across our entire suite of security analyzers.\n\n- Some [scan results are presented in developer context](https://docs.gitlab.com/ee/user/application_security/#gitlab-workflow-extension-for-vs-code) (like the IDE) simplify workflows and shift security further left.\n\n- [CI/CD pipeline](https://docs.gitlab.com/ee/ci/pipelines/) capabilities, which have been expanded and simplified, ensure better functionality while also bolstering security and compliance with enforcement and policies.\n\n- [Vulnerability management](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/) provides better views at scale, improved filtering, and more options to take action against vulnerability findings.\n\n- [Artifact attestations](https://docs.gitlab.com/ee/ci/yaml/signing_examples.html) provide a trustworthy authentication of each software artifact.\n\n### Secure business practices\n\nEach GitLab release demonstrated increased focus on compliance. Enhanced [auditing](https://docs.gitlab.com/ee/administration/audit_event_reports.html) and [event streaming](https://docs.gitlab.com/ee/administration/audit_event_streaming/) provide accountability across the entire SDLC. Compliance teams are now better equipped to proactively align to requirements, thanks to increased [policy management](https://docs.gitlab.com/ee/administration/compliance.html#policy-management), [workflow automation](https://docs.gitlab.com/ee/administration/compliance.html#compliant-workflow-automation), visibility via [compliance reporting](https://docs.gitlab.com/ee/user/compliance/compliance_center/), and [exportability of data](https://docs.gitlab.com/ee/user/compliance/compliance_center/compliance_standards_adherence_dashboard.html#export-compliance-standards-adherence-report-for-projects-in-a-group). 
\n\n## GitLab's Secure by Design features\n\nHere are some of the features and capabilities that align with Secure by Design.\n\n### SBOMs\n\nGitLab’s dynamic [software bill of materials](https://about.gitlab.com/blog/the-ultimate-guide-to-sboms/) focus improved SBOM generation while adding third-party SBOM intake capabilities. This also led to the ability to combine SBOMs, as well as to provide full attestation for standardized SBOM artifacts. Enhancements such as cross-project dependency visibility as well as dependency graphs enabled a better view of SBOM risk at scale. Continuous vulnerability scanning for SBOMs was also added during the past year, providing continuous insights for emergent risks for projects that are not under continuous development – no CI/CD pipeline required.\n\n### Vulnerability management\n\nNotable improvements can be seen in vulnerability management as GitLab product updates increased visibility to vulnerabilities at scale, added flexibility to [filtering](https://docs.gitlab.com/ee/user/application_security/vulnerability_report/#group-vulnerabilities), and added [remediation detail](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/#vulnerability-dismissal-reasons) options. With [GitLab Duo](https://about.gitlab.com/gitlab-duo/), our AI-powered suite of features, AI-assisted vulnerability remediation is taking a dramatic step forward.\n\n### AI-powered workflows\n\nSpeaking of AI, we deployed many [GitLab Duo features](https://about.gitlab.com/gitlab-duo/#features) during the past year that can help expedite Secure by Design execution, including:\n\n1. Code Suggestions - Use natural language processing to generate new code.\n2. Code Explanation - Discover what that uncommented code does in order to  properly maintain code bases and provide contextually aware product updates.\n3. Code Refactoring - Refactor legacy code bases into new libraries, functions, or memory-safe languages.\n4. 
Vulnerability Explanation - Understand the impact of a vulnerability and why it is creating risk to enable more accurate and thorough remediation.\n5. Vulnerability Resolution - Automatically resolve vulnerabilities to save significant amounts of time.\n6. Root Cause Analysis - Determine the root cause for a pipeline failure and failed CI/CD build.\n\n### Radical transparency\n\nGitLab continues to embrace its Transparency value by creating the [GitLab Trust Center](https://trust.gitlab.com/) and the [GitLab AI Transparency Center](https://about.gitlab.com/ai-transparency-center/). These public-facing pages provide radical transparency to GitLab's values, ethics, feature details, and compliance statements – including a [NIST Secure Software Development Framework](https://csrc.nist.gov/projects/ssdf) self-attestation letter.\n\n## What's next?\n\nAs Secure by Design enters its second year, we look forward to additional guidance and initiatives from CISA and other government agencies that will provide users around the world with more securely developed software.\n\n> Want to test-drive GitLab's security features? 
\"datePublished\": \"2025-01-23\"\n      }
Hosted runners for GitLab Dedicated eliminate these challenges by providing a fully managed solution that handles all aspects of runner infrastructure.
This means you can maintain the highest security standards without the complexity of implementing and maintaining security measures yourself.\n\n## Introducing native Arm64 support\n\nOur hosted runners now include native Arm64 support in addition to our existing x86-64 runners, offering significant advantages for modern development workflows.\n\n### Enhanced performance for Arm-based development\n\nNative Arm64 runners enable you to build, test, and deploy Arm-based applications in their native environment, ensuring optimal performance and compatibility. Teams developing Docker images or services targeting Arm-based cloud platforms can see build times cut significantly, accelerating their development cycles and deployments.\n\n### Cost-efficient computing\n\nArm-based runners can significantly reduce your computing costs, due to their efficient processing architecture and lower cost per minute. For compatible jobs, this means more affordable pipeline execution.\n\n### Native building capabilities\n\nWith support for both x86-64 and Arm64 architectures, you can:\n- build and test applications natively on either architecture\n- create multi-architecture container images efficiently\n- validate cross-platform compatibility in your CI/CD pipeline\n- optimize your delivery pipeline for specific target platforms\n- eliminate the performance overhead of emulation when building for Arm targets\n\nThis dual-architecture support ensures you have the flexibility to choose the right environment for each specific workload while maintaining a consistent and efficient CI/CD experience across all your projects.\n\n## Available runner sizes\n\nWe're expanding our runner offerings to include both x86-64 and Arm64 architectures with a range of configurations. 
- macOS runners to support application development for the Apple ecosystem
\"datePublished\": \"2023-07-24\"\n      }
security](/the-source/security/how-to-strengthen-security-by-applying-devsecops-principles/) into the entire software development lifecycle. Although DevSecOps is likely talked about more in coding circles than in boardrooms, the benefits of DevSecOps extend to the entire organization, helping teams deliver software faster without sacrificing quality or security.\n\nBut what exactly are the business benefits of DevSecOps? In this blog post, we'll dive into the top ways DevSecOps can help organizations drive business results and ROI.\n\n> Want to dig deeper into the basics of DevSecOps and how to use it to drive business success? [Download our guide to learn more](https://page.gitlab.com/resources-ebook-devsecops-success.html?utm_campaign=devsecopsplat&utm_content=ebookdevsecopssuccess).\n\n## How DevSecOps can benefit businesses\nHere are a few of the ways adopting DevSecOps can benefit a business:\n* Moving to a single platform [reduces money spent on toolchain](/the-source/platform/devops-teams-want-to-shake-off-diy-toolchains-a-platform-is-the-answer/) licenses and management\n* Focusing on security helps secure the business and its customers\n* Increasing productivity leads to faster time to market, which means staying ahead of competitors\n* Increasing security enables DevOps teams to identify issues that could ultimately hurt the brand and break customer and partner trust\n* Catching and fixing security vulnerabilities reduces legal liability connected to breaches\n* Having DevOps teams work cross-functionally in a single tool fosters collaboration in the software development team and across the entire company\n* Managing access controls, policies, and audits in one platform eases and promotes compliance\n* Unifying with a platform makes it easier to be cloud agnostic, which makes a company more resistant to vendor outages\n* Increasing visibility into every component in the [software supply 
chain](https://about.gitlab.com/blog/the-ultimate-guide-to-software-supply-chain-security/) boosts security and reliability\n* Automating scanning takes human error out of the equation by reducing the chances of having vulnerabilities that hackers can exploit\n\n## How DevSecOps drives ROI\n“Central for any executive is ROI and if a decision will bring their business forward or backward,” says [Ayoub Fandi](https://gitlab.com/ayofan), senior field security engineer at GitLab. “A DevSecOps platform brings all of the features, security tools, and automation of DevSecOps together in a single app so there’s no buying, stitching, and maintaining a complicated jumble of tools. It also means a company isn’t paying for all of those licensing costs, subscription fees, and maintenance expenses. Each year, companies spend more and more on technology, so if they can find a way to reduce their spending on that, it’ll be very welcome to executives.”\n\nBy helping protect a company’s brand, enabling the organization to stay ahead in a competitive field and remain compliant, and giving executives visibility over what’s causing slowdowns or increasing velocity, DevSecOps can be a valuable business tool.\n\n## What is a DevSecOps platform?\nA [comprehensive DevSecOps platform](https://about.gitlab.com/platform/) brings all of the features, security controls, and automation of DevSecOps together in one end-to-end application, so there’s no buying and supporting a complicated jumble of tools.\n\n“If I were in a company’s buying seat, I would be looking at DevSecOps,” says Fandi. “Executives want to worry less about compliance issues and security breaches that could have a big impact on their revenue, their ability to grow the company, and customers’ confidence in doing business with them. With DevSecOps, they can worry less about all of that.”\n\n_Explore how DevSecOps and a DevSecOps platform can help organizations save money, boost competitiveness, increase security, and more. 
\"datePublished\": \"2024-08-20\"\n      }
The European Union's [NIS2 Directive](https://eur-lex.europa.eu/eli/dir/2022/2555) is legislation that aims to increase cybersecurity by focusing on core competencies, including resilience, incident response, and risk management.\n\nAs the most comprehensive AI-powered DevSecOps platform, GitLab is used globally to develop better software faster. GitLab’s diverse feature set makes it a unique and valuable asset, capable of addressing many of our customers’ [security and compliance](https://about.gitlab.com/solutions/security-compliance/) needs. Let’s explore how customers can leverage the GitLab platform to support their NIS2 compliance efforts.\n\n## Article 7: National Cybersecurity Strategy\n\n**Section 2(a) - addressing cybersecurity in the supply chain for ICT products and ICT services used by entities for the provision of their services.**\n\nNIS2 requires member states to adopt policies that address cybersecurity in the supply chain. While the policies are yet to be defined by each member state, GitLab’s [Secure](https://about.gitlab.com/features/?stage=secure) stage offers a suite of features that enable customers to manage the security of their offered services as well as their supply chain.\n\n- [Static Application Security Testing (SAST)](https://about.gitlab.com/features/?stage=secure#static_application_security_testing),  [Infrastructure as Code Security Scanning](https://docs.gitlab.com/ee/user/application_security/iac_scanning/), and [Dynamic Application Security Testing (DAST)](https://about.gitlab.com/features/?stage=secure#dynamic_application_security_testing) enable customers to scan their source code, infrastructure definition files, and running applications for vulnerabilities. 
Then, findings can be triaged and addressed in accordance with each organization’s processes.\n\n- GitLab’s [Container Scanning](https://about.gitlab.com/features/?stage=secure#container_scanning) feature helps customers identify known vulnerabilities in their container images. \n\n![Container scanning report](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674396/Blog/Content%20Images/Screenshot_2024-08-20_at_10.38.54.png)\n\n- For the supply chain, [Dependency Scanning](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/) helps identify known vulnerabilities in our customers’ application dependencies. [License Compliance](https://docs.gitlab.com/ee/user/compliance/license_approval_policies.html) serves as an extension of Dependency Scanning by validating whether the licenses of dependencies meet a set of criteria determined by the organization.\n\n- GitLab’s [Dependency List](https://docs.gitlab.com/ee/user/application_security/dependency_list/) also enables organizations to create a CycloneDX [software bill of materials (SBOM)](https://about.gitlab.com/blog/the-ultimate-guide-to-sboms/). To learn more, view [this tutorial on how to export an SBOM](https://docs.gitlab.com/ee/tutorials/export_sbom.html).\n\nTogether, these scanning features help create a holistic and strong testing suite to support robust application development and supply chain management processes. 
DAST, Dependency Scanning, and License Compliance are available with a GitLab Ultimate subscription.\n\n> [Try GitLab Ultimate free for 30 days](https://gitlab.com/-/trial_registrations/new?glm_source=about.gitlab.com/blog&glm_content=default-saas-trial).\n\n## Article 21: Cybersecurity Risk Management Measures\n\nNIS2 requires member states to ensure that essential and important entities implement appropriate technical, organizational, and operational measures to properly manage risk and mitigate the impact that the users of their services incur as a result of incidents to their service.\n\n**Section 2(d) - supply chain security, including security-related aspects concerning the relationships between each entity and its direct suppliers or service providers.**\n\nAs noted above, GitLab’s [Secure](https://about.gitlab.com/features/?stage=secure) stage enables customers to implement a broad range of detective and responsive capabilities that improve the security of their code and address risk within their supply chains.\n\nThose features can be leveraged to address the requirements that member states will pass down as part of this section.\n\n**Section 2(e) - security in network and information systems acquisition, development and maintenance, including vulnerability handling and disclosure.**\n\nAs a comprehensive DevSecOps platform, GitLab’s features help secure our customers’ software delivery lifecycles from beginning to end.\n\n- GitLab’s [planning tools](https://about.gitlab.com/features/?stage=plan) can be used to support your project management efforts and ensure that information security is appropriately considered through all phases of a project’s lifecycle. 
To learn more, read [How GitLab can support your ISO 27001 compliance journey](https://about.gitlab.com/blog/how-gitlab-can-support-your-iso-compliance-journey/#organizational-controls).\n\n- Through features such as [Protected Branches](https://docs.gitlab.com/ee/user/project/protected_branches.html), [Merge Request Approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/), [Push Rules](https://docs.gitlab.com/ee/push_rules/push_rules.html), and [Signed Commits](https://docs.gitlab.com/ee/user/project/repository/signed_commits/), GitLab’s [Create Stage](https://about.gitlab.com/features/?stage=create) implements a secure foundation for developers to work from as they iterate and build secure code.\n\n![Signed commits](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674396/Blog/Content%20Images/Screenshot_2024-08-20_at_10.34.24.png)\n\n- GitLab’s [Secure stage](https://about.gitlab.com/features/?stage=verify) features, as mentioned above, deliver security-based value to GitLab’s [Verify](https://about.gitlab.com/features/?stage=verify), [Package](https://about.gitlab.com/features/?stage=package), and [Deploy](https://about.gitlab.com/features/?stage=deploy) stages. 
These stages include core [Continuous Integration](https://about.gitlab.com/features/?stage=verify)/[Continuous Delivery](https://about.gitlab.com/features/?stage=deploy) features, such as [MR Pipelines](https://docs.gitlab.com/ee/ci/merge_request_pipelines/) and [Protected Runners](https://docs.gitlab.com/ee/ci/runners/#prevent-runners-from-revealing-sensitive-information), as well as several artifact registries to meet your organizational needs.\n\n- GitLab’s [Monitor stage](https://about.gitlab.com/features/?stage=monitor) offers features such as [Alerts](https://docs.gitlab.com/ee/operations/incident_management/alerts.html) and [Incidents](https://docs.gitlab.com/ee/operations/incident_management/incidents.html) to help organizations become aware of incidents and track them to remediation all within GitLab.\n\n- GitLab’s [Govern stage](https://about.gitlab.com/features/?stage=govern) features, as the name implies, set the standard for who can use a GitLab instance and in which way. They also help support an organization’s overall compliance efforts.\n  - Features such as [SCIM](https://docs.gitlab.com/ee/administration/settings/scim_setup.html), [SSO](https://docs.gitlab.com/ee/user/group/saml_sso/), and [Custom Roles](https://docs.gitlab.com/ee/user/custom_roles.html) effectively support authentication and authorization lifecycles.\n  - [MR Approval Policies](https://docs.gitlab.com/ee/user/application_security/policies/scan-result-policies.html) are a powerful tool for implementing a security gate in your software development lifecycle. Using MR Approval Policies, an organization can require additional approvals for events such as when vulnerabilities are detected after a commit, certain license types are detected in dependencies, or for any MR made against a particular protected branch. 
MR Approval Policies are available with a GitLab Ultimate subscription.\n  - With [GitLab Duo](https://about.gitlab.com/gitlab-duo/), organizations can leverage the [Vulnerability Explanation](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/index.html#explaining-a-vulnerability) feature to:\n    - summarize the vulnerability\n    - help developers and security analysts understand the vulnerability, how it could be exploited, and how to fix it\n    - provide a suggested mitigation\n  -  [Streaming Audit Events](https://docs.gitlab.com/ee/user/compliance/audit_event_streaming.html) enable organizations to send audit events from their top-level group to an external location to receive all events about the group, subgroups, and projects. Streaming Audit Events are available with a GitLab Ultimate subscription.\n  -  A strong insider threat program combines multiple layers of detective, preventive, and reactive controls. [Git Abuse Rate Limiting](https://docs.gitlab.com/ee/user/group/reporting/git_abuse_rate_limit.html) automatically notifies administrators when a user downloads, clones, pulls, fetches, or forks more than a specified number of repositories of a group in a given timeframe. Organizations can go a step further and enable automatic banning to ban the offending user from the group and its subgroups. Git Abuse Rate Limiting is available with a GitLab Ultimate subscription.\n  -  To dive deeper on detected vulnerabilities, GitLab’s [Vulnerability Report](https://docs.gitlab.com/ee/user/application_security/vulnerability_report/) provides information about vulnerabilities from scans of the default branch. 
**Section 2(j) - the use of multi-factor authentication or continuous authentication solutions, secured voice, video and text communications, and secured emergency communication systems within the entity, where appropriate.**
\"datePublished\": \"2024-08-27\"\n      }
Indeed is the [#1 job site](https://www.indeed.com/about?isid=press_us&ikw=press_us_press%2Freleases%2Faward-winning-actress-viola-davis-to-keynote-indeed-futureworks-2023_textlink_https%3A%2F%2Fwww.indeed.com%2Fabout) in the world with more than 350 million unique visitors every month.\n\nFor Indeed's Engineering Platform teams, we have a slightly different motto: \"We help people to help people get jobs.\" As part of a data-driven engineering culture that has spent the better part of two decades always putting the job seeker first, we are responsible for building the tools that not only make this possible, but empower engineers to deliver positive outcomes to job seekers every day.\n\nGitLab Continuous Integration has allowed Indeed’s CI Platform team of just 11 people to effectively support thousands of users across the company. Other benefits Indeed has realized by moving to GitLab CI include:\n- 79% increase in daily pipelines\n- 10-20% lower CI hardware costs\n- Decreased support burden\n\n## Evolving our CI platform: From Jenkins to a scalable solution\n\nLike many large technology companies, we built our CI platform organically as the company scaled, using the de facto open source and industry standard solutions available at the time. Back in 2007, when Indeed had fewer than 20 engineers, we were using Hudson, Jenkins’ direct predecessor.\n\nToday, through nearly two decades of growth, we have thousands of engineers. As new technology became available, we made incremental improvements, switching to Jenkins around 2011. Another improvement allowed us to move most of our workloads to dynamic cloud worker nodes using [AWS EC2](https://aws.amazon.com/ec2/). As we entered the Kubernetes age, however, the system architecture reached its limits.\n\nJenkins’ architecture was not created with the cloud in mind. 
Jenkins operates by having a \"controller\" node, a single point of failure that runs critical parts of a pipeline and farms out certain steps to worker nodes (which can scale horizontally to some extent). Controllers are also a manual scaling axis.\n\nIf you have too many jobs to fit on one controller, you must partition your jobs across controllers manually. CloudBees offers ways to mitigate this, including the CloudBees Jenkins Operations Center, which allows you to manage your constellation of controllers from a single centralized place. However, controllers remain challenging to run in a Kubernetes environment because each controller is a fragile single point of failure. Activities like node rollouts or hardware failures cause downtime.\n\nIn addition to the technical limitations baked into Jenkins itself, our CI platform also had several problems of our own making. For example, we used the Groovy Jenkins DSL to generate jobs from code in each repository. This led to each project having its own copy-pasted job pipeline, resulting in hundreds of versions that were hard to maintain and update. While Indeed’s engineering culture values flexibility and allows teams to operate in separate repositories, this flexibility became a burden as teams spent too much time addressing regular maintenance requests.\n\nRecognizing our technical debt, we turned to the [Golden Path pattern](https://tag-app-delivery.cncf.io/whitepapers/platforms/), which allows flexibility while providing a default route to simplify updates and encourage consistent practices across projects.\n\nThe CI Platform team at Indeed is not very large. 
Our team of around 11 engineers supports thousands of users, fielding support requests, performing upgrades and maintenance, and enabling always-on support for our global company.\n\nBecause our team not only supports our GitLab instance but also the entire CI platform, including the artifact server, our shared build code, and multiple other custom components of our platform, we had our work cut out for us. We needed a plan that would help us address our challenges while making the most efficient use of our existing resources.\n\n## Moving to GitLab CI\n\nAfter a careful design review with key stakeholders, we decided to migrate the entire company from Jenkins to GitLab CI. The primary reasons for choosing GitLab CI were:\n- We were already using GitLab for source code management.\n- GitLab is a complete offering that provides everything we need for CI.\n- GitLab CI is designed for scalability and the cloud.\n- GitLab CI enables us to write templates that extend other templates, which is compatible with our golden path strategy.\n- GitLab is open source software and the GitLab team has always been supportive in helping us submit fixes, giving us extra flexibility and reassurance.\n\nBy the time we officially announced that the GitLab CI Platform would be generally available to users, we already had 23% of all builds happening in GitLab CI from a combination of grassroots efforts and early adopters.\n\nThe challenge of the migration, however, would be the long tail. Due to the number of custom builds in Jenkins, an automated migration tool would not work for the majority of teams. Most of the benefits of the new system would not come until the old system was at 0%. Only then could we turn off the hardware and save the CloudBees license fee.\n\n## Feature parity and the benefits of starting over\n\nThough we support many different technologies at Indeed, the three most common languages are Java, Python, and JavaScript. 
These language stacks are used to make libraries, deployables (web services or applications), and cron jobs (a process that runs at regular intervals, for example, to build a data set in our data lake). Each of these formed a matrix of project types (Java Library, Python Cronjob, JavaScript Webapp, etc.) for which we had a skeleton in Jenkins. Therefore, we had to produce a golden path template in GitLab CI for each of these project types.\n\nMost users could use these recommended paths without change, but for those who did require customization, the golden path would still be a valuable starting point and enable them to change only what they needed, while still benefiting from centralized template updates in the future.\n\nWe quickly realized that most users, even those with customizations, were happy to take the golden path and at least try it. If they missed their customizations, they could always add them later. This was a surprising result! We thought that teams who had invested in significant customization would be loath to give them up, but in the majority of cases teams just didn't care about them anymore. This allowed us to migrate many projects very quickly — we could just drop the golden path (a small file about 6 lines long with includes) into their project, and they could take it from there.\n\n## InnerSource to the rescue\n\nThe CI Platform team also adopted a policy of \"external contributions first\" to encourage everyone in the company to participate. This is sometimes called InnerSource. We wrote tests and documentation to enable external contributions — contributions from outside our immediate team — so teams that wanted to write customizations could instead include them in the golden path behind a feature flag. 
This let them share their work with others and ensure we didn't break them moving forward (because they became part of our codebase, not theirs).\n\nThis also had the benefit that particular teams who were blocked waiting for a feature they needed were empowered to work on the feature themselves. We could say \"we plan to implement the feature in a few weeks, but if you need it earlier than that we are happy to accept a contribution.\" In the end, many core features necessary for parity were developed in this manner, more quickly and better than our team had resources to do it. The migration would not have been a success without this model.\n\n## Ahead of schedule and under budget\n\nOur CloudBees license expired on April 1, 2024. This gave us an aggressive target to achieve the full migration. This was particularly ambitious considering that at the time, 80% of all builds (60% of all projects) still used Jenkins for their CI. This meant over 2,000 [Jenkinsfiles](https://www.jenkins.io/doc/book/pipeline/jenkinsfile/) would still need to be rewritten or replaced with our golden path templates.\n\nTo achieve this target, we made documentation and examples available, implemented features where possible, and helped our users contribute features where they were able.\n\nWe started regular office hours, where anyone could come and ask questions or seek our help to migrate. We additionally prioritized support questions relating to migration ahead of almost everything else. Our team became GitLab CI experts and shared that expertise inside our team and across the organization.\n\nAutomatic migration for most projects was not possible, but we discovered it could work for a small subset of projects where customization was rare. We created a Sourcegraph batch change campaign to submit merge requests to migrate hundreds of projects, and poked and prodded our users to accept these MRs.\n\nWe took success stories from our users and shared them widely. 
As users contributed new features to our golden paths, we advertised that these features \"came free\" when you migrated to GitLab CI. Some examples included built-in security and compliance scanning, Slack notifications for CI builds, and integrations with other internal systems.\n\nWe also conducted a campaign of aggressive \"scream tests.\" We automatically disabled Jenkins jobs that hadn't run or succeeded in a while, and told users that if they needed them, they could turn them back on. This was a low-friction way to identify which jobs were actually needed. We had thousands of jobs that hadn't been run a single time since our last CI migration (which was Jenkins to Jenkins). This told us we could safely ignore almost all of them.\n\nIn January 2024, we nudged our users by announcing that all Jenkins controllers would become read-only (no builds) unless an exception was explicitly requested. We had much better ownership information for controllers and they generally aligned with our organization's structure, so it made sense to focus on controllers rather than jobs. The list of controllers was also a much more manageable list than the list of jobs.\n\nTo obtain an exception, we asked our users to find their controllers in a spreadsheet and put their contact information next to each one. This enabled us to get a guaranteed up-to-date list of stakeholders we could follow up with as we sprinted to the finish line, but also enabled users to clearly let us know which jobs they absolutely needed. 
At peak, we had about 400 controllers; by January we had 220, but only 54 controllers required exceptions (several of them owned by us, to run our tests and canaries).\n\n![Indeed - Jenkins Controller Count graph](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099357/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750099357392.png)\n\nWe had a manageable list of around 50 teams we divided among our team and started doing outreach to understand how each team was progressing with the migration. We spent January and February discovering that some teams planned to finish their migration without our help before February 28 others were planning to deprecate their projects before then, and a very small number were very worried they wouldn't make it.\n\nWe were able to work with this smaller set of teams and provide them with “white-glove” service. We still explained that while we lacked the expertise necessary to do the migration for them, we could partner with a subject matter expert from their team. For some projects, we wrote and they reviewed; for others, they wrote and we reviewed. In the end, all of our work paid off and we turned off Jenkins on the very day we had announced 8 months earlier.\n\n## The results: Enhanced CI efficiency and user satisfaction\n\nAt its peak, our Jenkins CI platform ran over 14,000 pipelines per day and serviced our thousands of projects. Today, our GitLab CI platform has run over 40,000 pipelines in a single day and regularly runs over 25,000 per day. The incremental cost of each job of each pipeline is similar to Jenkins, but without the overhead of hardware to run the controllers. Additionally, these controllers served as single points of failure and scaling limiters that forced us to artificially divide our platform into segments. While an apples-to-apples comparison is difficult, we find that with this overhead gone our CI hardware costs are 10-20% lower. 
Additionally, the support burden of GitLab CI is lower since the application automatically scales in the cloud, has cross-availability-zone resiliency, and the templating language has excellent public documentation available.\n\nA benefit just as important, if not moreso, is that now we are at over 70% adoption of our golden paths. This means that we can roll out an improvement and over 5,000 projects at Indeed will benefit immediately with no action required on their part. This has enabled us to move some jobs to more cost-effective ARM64 instances, keep users' build images updated more easily, and better manage other cost saving opportunities. Most importantly, our users are happier with the new platform.\n\n__About the author:__\n*Carl Myers lives in Sacramento, CA, and is the manager of the CI Platform team at Indeed. Carl has spent his nearly two-decade career dedicated to building internal tools and developer platforms that delight and empower engineers at companies large and small.*\n\n**Acknowledgements:**\n*This migration would not have been possible without the tireless efforts of Tron Nedelea, Eddie Huang, Vivek Nynaru, Carlos Gonzalez, Lane Van Elderen, and the rest of the CI Platform team. The team also especially appreciates the leadership of Deepak Bitragunta, and Irina Tyree for helping secure buy-in, resources and company wide alignment throughout this long project. 
Finally, our thanks go out to everyone across Indeed who contributed code, feedback, bug reports, and helped migrate projects.*\n\n**This is an edited version of the article [How Indeed Replaced Its CI Platform with Gitlab CI](https://engineering.indeedblog.com/blog/2024/08/indeed-gitlab-ci-migration/), originally published on the Indeed engineering blog.**",[1075,109,2572,9],"user stories",{"slug":2574,"featured":91,"template":698},"how-indeed-transformed-its-ci-platform-with-gitlab","content:en-us:blog:how-indeed-transformed-its-ci-platform-with-gitlab.yml","How Indeed Transformed Its Ci Platform With Gitlab","en-us/blog/how-indeed-transformed-its-ci-platform-with-gitlab.yml","en-us/blog/how-indeed-transformed-its-ci-platform-with-gitlab",{"_path":2580,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2581,"content":2587,"config":2592,"_id":2594,"_type":14,"title":2595,"_source":16,"_file":2596,"_stem":2597,"_extension":19},"/en-us/blog/how-to-gitlab-single-sign-on-with-saml-scim-and-azures-entra-id",{"title":2582,"description":2583,"ogTitle":2582,"ogDescription":2583,"noIndex":6,"ogImage":2584,"ogUrl":2585,"ogSiteName":685,"ogType":686,"canonicalUrls":2585,"schema":2586},"How-to: GitLab Single Sign-on with SAML, SCIM, and Azure’s Entra ID","Follow this detailed walk-through of the configuration steps required to configure GitLab Single Sign-on, using Microsoft Azure’s Entra ID as the identity provider.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098047/Blog/Hero%20Images/Blog/Hero%20Images/AdobeStock_1097303277_6gTk7M1DNx0tFuovupVFB1_1750098046895.jpg","https://about.gitlab.com/blog/how-to-gitlab-single-sign-on-with-saml-scim-and-azures-entra-id","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"How-to: GitLab Single Sign-on with SAML, SCIM, and Azure’s Entra ID\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Rob Jackson\"}],\n        
\"datePublished\": \"2025-01-23\",\n      }",{"title":2582,"description":2583,"authors":2588,"heroImage":2584,"date":2511,"body":2590,"category":693,"tags":2591},[2589],"Rob Jackson","As organizations increase in size, it becomes increasingly difficult and critical to ensure that the right team members have access to the right groups and projects within their development platform. GitLab offers some powerful methods to manage user access, especially now with [custom roles](https://about.gitlab.com/blog/how-to-tailor-gitlab-access-with-custom-roles/), but performing this at scale through a point-and-click user interface can be frustrating. However, all is not lost. You can use Security Assertion Markup Language (SAML) and System for Cross-domain Identity Management (SCIM) as a solution. (There are moments where I’m grateful for acronyms.) \n\nI was researching this topic for a particular customer, and walking through the GitLab documentation on the capabilities, but I never felt like I truly understood the integration. As is often the case, especially when dealing with integrating components, the knowledge from experience far outweighs that gained from reading or watching. In that light, I wanted to share my steps along this path and invite you all to join me. All you need is a free trial of Microsoft Azure Entra ID and GitLab Premium with a top-level group on GitLab.com.  \n\n**Note:** This exercise produces a working integration, however, for production environments there may be necessary deviations. For example, the user account email for the identity provider (Entra ID in this case) will likely not match your GitLab account email. \n\n## Creating the application in Entra ID\n\nFirst, go to the Entra ID admin center. Within the **Applications** area, select **Enterprise Applications**. 
We’re going to create a new application, and then create our own application.\n\n![Entra ID application creation flow](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image13_aHR0cHM6_1750098073325.png)\n\n\u003Ccenter>\u003Ci>Figure 1: Entra ID application creation flow\u003C/i>\u003C/center>\u003Cbr>\n\nWith our new application created, we can start configuring the single sign-on (SSO) parameters for our application. For this task, you may want to have side-by-side browser windows. One window on your Entra ID application, and another window on the SAML settings for your GitLab group. Those settings are located under **Settings**, then SAML SSO on the left side of your GitLab window, as shown in Figure 2. If you don’t see this option, you aren’t in the top-level group, don’t have permission to configure SAML, or don’t have GitLab Premium enabled for that group.\n\n![GitLab SAML configuration](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image7_aHR0cHM6_1750098073326.png)\n\n\u003Ccenter>\u003Ci>Figure 2: GitLab SAML configuration\u003C/i>\u003C/center>\u003Cbr>\n\nWithin your Entra ID interface, select **Single sign-on** and click the SAML card.\n\n![Entra ID SAML configuration](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image24_aHR0cHM6_1750098073328.png)\n\n\u003Ccenter>\u003Ci>Figure 3: Entra ID SAML configuration\u003C/i>\u003C/center>\u003Cbr>\n\nWith the side-by-side view, the SAML configuration settings are on the left and the GitLab SSO settings on the right. 
\n\n![Side-by-side view of Entra ID and GitLab](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image16_aHR0cHM6_1750098073330.png)\n\n\u003Ccenter>\u003Ci>Figure 4: Side-by-side view of Entra ID and GitLab\u003C/i>\u003C/center>\u003Cbr>\n\nNow we can start copying and pasting parameters. Within the Entra ID interface, select **Edit** within the “Basic SAML Configuration” block. The parameter sources and destination are identified in the following table.\n\n| Source (GitLab) | Destination (Entra ID) |\n| :---------------- | :------: | \n| Identifier        |   Identifier (Entity ID)   | \n| Assertion consumer service URL |   Reply URL (Assertion Consumer Service URL)   | \n| GitLab single sign-on URL    |  Sign on URL (Optional)   | \n\n\u003Cbr>\nOnce completed, your side-by-side view should appear similar to the following (noting the URLs are unique to your environment).\u003Cbr>\n\n![Completed basic SAML SSO configuration](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image9_aHR0cHM6_1750098073332.png)\n\n\u003Ccenter>\u003Ci>Figure 5: Completed basic SAML SSO configuration\u003C/i>\u003C/center>\u003Cbr>\n\nClick **Save** within the Entra ID “Basic SML Configuration” window to save your hard work thus far. Note: You may need to click on the “X” in the upper right of the “Basic SAML Configuration” window if it doesn’t close automatically. \n\nAfter this window closes, you may get a popup to test single sign-on with your application. Select **No, I’ll test later**, because we still have more work to do (there is always more work to do). \n\n## Configuring attributes and claims\n\nWithin the Entra ID user interface, look for the section for “Attributes and Claims,” and click the **Edit** pencil icon. 
The first thing we want to do is modify the Unique User identifier (Name ID) value, so click on that row and set the Source attribute to **user.objectid**. Additionally, the Name identifier format must be updated, and set to **Persistent**.\n\n![Configuring attributes and claims](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image14_aHR0cHM6_1750098073333.png)\n\n\u003Ccenter>\u003Ci>Figure 6: Configuring attributes and claims\u003C/i>\u003C/center>\u003Cbr>\n\nSave that claim configuration. Now we have additional claims to configure, but there are only three that we need here. So, feel free to go wild and delete those default four items under **Additional claims**, or you can edit the existing ones to match the table below. Note that these values (specifically, the Name) are case sensitive. \n\u003Cbr>\n\n| Name | Namespace | Source Attribute |\n| :---------------- | :------: | :------: | \n|emailaddress |http://schemas.microsoft.com/ws/2008/06/identity/claims | user.otheremail |\n| NameID | http://schemas.microsoft.com/ws/2008/06/identity/claims |user.objectid |\n\n\u003Cbr>\n\nThe resulting claims configuration should appear as follows. Note the use of **otheremail** for the “emailaddress” attribute. This was necessary for me as my primary email addresses within Entra ID are not the addresses used on GitLab.com. 
If you recall, when I set up my “user,\" I modified the contact information to include my gitlab.com email address as one of my “Other emails.” \n\n![Configuring the claims](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image21_aHR0cHM6_1750098073335.png)  \n\n\u003Ccenter>\u003Ci>Figure 7: Configuring the claims\u003C/i>\u003C/center>\u003Cbr>\n\nWith your attributes configured, under the Advance settings, enable **Include attribute name format** setting.\n\n![Advanced claims configuration](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image8_aHR0cHM6_1750098073336.png)\n\n\u003Ccenter>\u003Ci>Figure 8: Advanced claims configuration\u003C/i>\u003C/center>\u003Cbr>\n\nYour \"Attributes and Claims\" window should now look similar to Figure 9 below.\n\n![Configured attributes and claims](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image18_aHR0cHM6_1750098073337.png)\n\n\u003Ccenter>\u003Ci>Figure 9: Configured attributes and claims\u003C/i>\u003C/center>\u003Cbr>\n\nIf you’re happy, or at least relatively content, with your configuration, click the “X” in the top right corner of the \"Attributes and Claims\" window to close it. \n\n## Configuring and assigning users\n\nNow that we have our application configured, we need to ensure that our users have been assigned to that application. I'll assume you’re working with a test instance that does not have the same email address as what is configured within your GitLab.com namespace. 
\n\nSo let’s go to the “Users and groups” within the Entra ID user interface for your configured application.\n\n![Managing application users and groups](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image17_aHR0cHM6_1750098073338.png)\n\n\u003Ccenter>\u003Ci>Figure 10: Managing application users and groups\u003C/i>\u003C/center>\u003Cbr>\n\nSelect **Add user/group**, and under the “Users and groups” where it says “None Selected,” click that text. Now you can select the user(s) to add to your application. These are the users that will be permitted to log into GitLab, authenticating themselves through Entra ID.\n\n![User selection](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image23_aHR0cHM6_1750098073339.png)\n\n\u003Ccenter>\u003Ci>Figure 11: User selection\u003C/i>\u003C/center>\u003Cbr>\n\nOnce selected, at the bottom of that page, click **Select**, and at the bottom of the next, select **Assign**. Now you should have a user assigned to your application.\n\n![User assigned to application](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image12_aHR0cHM6_1750098073340.png)\n\n\u003Ccenter>\u003Ci>Figure 12: User assigned to application\u003C/i>\u003C/center>\u003Cbr>\n\nNext, we need to ensure that the GitLab.com email address for that user is configured correctly. By clicking on the user itself, we can modify or configure some  additional information about that user. We can see below, the User principal name, which is based on an “onmicrosoft” domain. This is not the email address I have associated with my GitLab.com account. If you recall that we set the “Email address” attribute to “otheremail,” this is where we now configure that “other” email address. 
\n\n![User properties](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image20_aHR0cHM6_1750098073341.png)\n\n\u003Ccenter>\u003Ci>Figure 13: User properties\u003C/i>\u003C/center>\u003Cbr>\n\nClick the option to **Edit properties** for the user, and click on the **Contact Information** heading. Here we can add other emails – more specifically, the email address utilized for your GitLab.com account.  \n\n![Configuration of alternate email address](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image15_aHR0cHM6_1750098073342.png)\n\n\u003Ccenter>\u003Ci>Figure 14: Configuration of alternate email address\u003C/i>\u003C/center>\u003Cbr>\n\nThat should complete the configuration parameters that we need in Entra ID, but wait, there’s more. \n\nWithin the GitLab side now, you will need to configure a couple parameters. First, you might as well enable SAML for the group as that’s kind of a key piece here. GitLab offers some additional options to disable password authentication or enforce SSO to reduce the security risks within your application, but we’ll leave those unchecked for now. Similar to the table above, we’ll need a couple things from Entra ID to configure into GitLab. Please refer to the table below. \n\u003Cbr>\n\n| Source (Entra ID) | Destination (GitLab) | \n| :---------------- | :------: | \n|Login URL |Identity provider single sign-on URL |\n| Thumbprint | Certificate fingerprint|\n\n\u003Cbr>\n\n![GitLab SAML configuration from Entra ID](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image25_aHR0cHM6_1750098073343.png)\n\n\u003Ccenter>\u003Ci>Figure 15: GitLab SAML configuration from Entra ID\u003C/i>\u003C/center>\u003Cbr>\n\nLastly, you want to configure the default membership role for users logging in via SAML. 
Note that the access that you set for users here will cascade down to other groups and projects within your top-level group. Therefore, I would strongly recommend NOT setting this role to be “Owner.” Either “Guest” or “Minimal Access” would be acceptable options here, depending on the security posture of your organization. For more information about what these roles can and can not do, refer to the GitLab documentation on [Roles and Permissions](https://docs.gitlab.com/ee/user/permissions.html#roles). Now, save your work on the GitLab interface by clicking that beautiful blue **Save changes** button.\n\nWith your GitLab settings saved, you can now test your setup. I would encourage you to do this both through the “Verify SAML Configuration” on the GitLab system as well as with the Entra ID SSO \"Test\" button.\n\n## Troubleshooting SAML\n\nIn addition to the troubleshooting steps included within [GitLab documentation](https://docs.gitlab.com/ee/user/group/saml_sso/troubleshooting.html), I wanted to include a couple other items that I personally experienced. \n\nIf you get an error stating that the SAML reference did not contain an email address, check the Claim name for your email within the “Attributes and Claims” section within your Entra ID application. With GitLab 16.7, we added support for the “2008” attribute names, and at least for the email address setting, I found the default “xmlsoap” name for the email address claim to be a disappointing failure. \n\nAnother common error is “SAML Name ID and email address do not match your user account.” As you may suspect, this error is caused by a mismatch of the “NameID” and “emailaddress” attributes within the Entra ID application. This could be a misconfiguration of the “Attributes and Claims,” but it could also be that the properties of your test user don’t match your configuration. 
One helpful method to identify exactly what is coming through the SAML exchange is to use a SAML Tracer or SAML Message Decoder plugin with your web browser. \n\n## SCIM\n\nNow that you have SAML configured to enable users to log in via your Entra ID application, let’s make sure that people are assigned to the proper group(s) upon login. This can be incredibly helpful at scale, where instead of manually identifying which groups the particular users belong to, GitLab can learn this information from your identity application, Entra ID in this case. \n\nBecause SCIM utilizes groups to identify group membership, we need to create a group within Entra ID and add the relevant user(s) to the group. For this we’ll need the main administration menu for Entra ID. \n\n![Entra ID Group configuration](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image19_aHR0cHM6_1750098073344.png)\n\n\u003Ccenter>\u003Ci>Figure 16: Entra ID Group configuration\u003C/i>\u003C/center>\u003Cbr>\n\nWe’re going to create a new group and assign our user(s) to that group. So click **New group** and configure a new group, which only requires you to configure a “Group name.” I used the default group type of “Security.” Leave the “Membership type” as “Assigned.” From this window, we can also assign the members.\n\n![Creating a New Entra ID Group](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750098073345.png)\n\n\u003Ccenter>\u003Ci>Figure 17: Creating a New Entra ID Group\u003C/i>\u003C/center>\u003Cbr>\n\nOnce you’ve added the member(s), click **Create** in the bottom of that window. With your group created, and the user(s) assigned to the group, we can configure SCIM.\n\nImmediately below the SAML configuration section within the GitLab UI, you’ll see the “SCIM Token” area. 
Here you can generate a new token, and copy the endpoint URL, both of which will be useful for the next steps. Note that if you forget or already have a SCIM token, it can be reset. \n\n![SCIM token and endpoint within GitLab](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image10_aHR0cHM6_1750098073345.png)\n\n\u003Ccenter>\u003Ci>Figure 18: SCIM token and endpoint within GitLab\u003C/i>\u003C/center>\u003Cbr>\n\nWith this information saved, return to your Entra ID application configuration. Within the left side menu, you’ll find the following: \n\n![Provisioning SCIM within Entra ID](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750098073346.png)\n\n\u003Ccenter>\u003Ci>Figure 19: Provisioning SCIM within Entra ID\u003C/i>\u003C/center>\u003Cbr>\n\nWithin the \"Provisioning\" section, click on **New Configuration**, which opens a new page where that token and URL from GitLab will be used. \n\n![New provisioning configuration](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image22_aHR0cHM6_1750098073348.png)\n\n\u003Ccenter>\u003Ci>Figure 20: New provisioning configuration\u003C/i>\u003C/center>\u003Cbr>\n\nFeel free to test the connection to ensure that you’ve configured the parameters properly. After testing, click on the **Create** button to establish the configuration and work on our mappings and settings. You may need to click the “X” in the top right corner of the panel to return to the overview configuration.\n\nExpand the “Mappings,” which includes two parameters; “Provision Microsoft Entra ID Groups” and “Provision Microsoft Entra ID Users.” SCIM group provisioning isn’t currently supported in GitLab, and although it doesn’t break the integration, keeping group provisioning enabled may cause negligible error messages. 
Therefore, we want to disable “Provision Microsoft Entra ID Groups,” so click that entry and set the “Enabled” field to “No.” \n\n![Provisioning attribute mapping](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750098073349.png)\n\n\u003Ccenter>\u003Ci>Figure 21: Provisioning attribute mapping\u003C/i>\u003C/center>\u003Cbr>\n\nSave that configuration and select “Provision Microsoft Entra ID Users.” Validate that all three \"Target Object Actions\" are enabled, and then proceed to the “Attribute Mapping” section. Delete all existing mappings available to delete (I find this easier because attributes can’t be assigned twice), and then configure the Attribute Mappings per the following table:\n\n| customappsso Attribute (Destination) | Microsoft Entra ID Attribute (Source) | Matching Precedence | Mapping Type |\n| :---------------- | :------: | :------: | :------: | \n|externalID|objectId|1|Direct|\n|active|Switch([IsSoftDeleted], , \"False\", \"True\", \"True\", \"False\")| |Expression|\n|userName|mailNickname| |Direct|\n|name.formatted|displayName| |Direct|\n|Emails[type eq “other”].value|userPrincipalName||Direct|\n\n\u003Cbr>\n\n![Editing attributes](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image26_aHR0cHM6_1750098073349.png)\n\n\u003Ccenter>\u003Ci>Figure 22: Editing attributes\u003C/i>\u003C/center>\u003Cbr>\n\nAfter configuring all of the attribute mappings, the result should be similar to that found in Figure 22.\n\n![Completed attribute mapping configuration](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750098073350.png)\n\n\u003Ccenter>\u003Ci>Figure 23: Completed attribute mapping configuration\u003C/i>\u003C/center>\u003Cbr>\n\nNote the use of the “other” email within the **customappssso** attribute. 
This relates back to the “other” email we configured for the user back in the Entra ID user properties. In a production situation, the emails for the SSO account and the email address for the account within GitLab should match. \n\nWith your mapping complete (congratulations, Ptolemy), there are some advanced configuration settings necessary. Underneath the \"Attribute Mappings,\" click the box for “Show advanced options.” Once this box is checked, a link called “Edit attribute list for customappsso” is revealed.\n\n![Advanced attribute configuration](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750098073351.png)\n\n\u003Ccenter>\u003Ci>Figure 24: Advanced attribute configuration\u003C/i>\u003C/center>\u003Cbr>\n\nClick that link, and ensure that the Name “ID” is both “Primary Key” and “Required,” and that “externalID” is also “Required.” These attributes both refer to a unique user ID generated by Entra ID. However, although the “id” itself is required, it is not consistently provided within the API calls. Therefore, GitLab relies on the “externalID” to ensure the proper connection between the Entra ID and GitLab user accounts. \n\n![Required attribute list](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098073351.png)\n\n\u003Ccenter>\u003Ci>Figure 25: Required attribute list\u003C/i>\u003C/center>\u003Cbr>\n\nSave these settings, and then close the “Attribute Mapping” page with the “X” in the top right of the window. Return to the \"Application Provisioning\" section and click **Start provisioning**. \n\nWithin GitLab, we need to configure the association between the group we configured within Entra ID and the level of access we want those users to have within the GitLab top-level group. 
Note that this association can be configured on each sub-group within GitLab for more extensive provisioning, but within GitLab, permissions flow downhill. Whatever permission you set for a user at a top-level group, or sub-group, will cascade down to all projects and groups contained therein. \n\nWithin the \"Settings\" portion of the GitLab menu, select **SAML Group Links**. Here is where you’ll configure the group name and determine what access level, or role, members of the Entra ID Group will have within this particular GitLab Group.\n\n![GitLab SAML Group link](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image27_aHR0cHM6_1750098073352.png)\n\n\u003Ccenter>\u003Ci>Figure 26: GitLab SAML Group link\u003C/i>\u003C/center>\u003Cbr>\n\nAs shown in Figure 26, I’ve configured my membership to The Academy such that any users within the dev-security group from Entra ID  will be granted Developer access. Note that this is a slight variation of what a typical production environment would look like. In most instances, the user account within the identity provider (Entra ID, in this case) would match the user’s corporate account email (and we wouldn’t require “other” emails). When configured properly, if the user does not already have an account on GitLab, one will be created for them tied to their SSO account. \n\n![GitLab SSO tutorial - image11](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098073/Blog/Content%20Images/Blog/Content%20Images/image11_aHR0cHM6_1750098073352.png)\n\n\u003Ccenter>\u003Ci>Figure 27: SAML Group Links configured\u003C/i>\u003C/center>\u003Cbr>\n\nNow that you’ve completed the configuration, give it a try! From another browser, preferably in private mode to ignore any cookies or other yummy artifacts, paste the link for the GitLab SSO URL found in the GitLab SAML configurations. 
You should be prompted to log in with your Entra ID credentials and gain the proper access to your GitLab group! \n\nCongratulations, you’ve made it! I hope you’ve learned from and appreciate the work here, and we can all rejoice in the fact that the users within the Play-Dough app can now all properly authenticate, with the right permissions, to The Academy!\n\n> Don't have a GitLab account? [Sign up for a free trial today](https://about.gitlab.com/free-trial/devsecops/).\n\n## Read more\n- [The ultimate guide to enabling SAML and SSO on GitLab.com](https://about.gitlab.com/blog/the-ultimate-guide-to-enabling-saml/)\n- [SAML SSO for GitLab.com groups documentation](https://docs.gitlab.com/ee/user/group/saml_sso/)",[719,693,9,951,930],{"slug":2593,"featured":6,"template":698},"how-to-gitlab-single-sign-on-with-saml-scim-and-azures-entra-id","content:en-us:blog:how-to-gitlab-single-sign-on-with-saml-scim-and-azures-entra-id.yml","How To Gitlab Single Sign On With Saml Scim And Azures Entra Id","en-us/blog/how-to-gitlab-single-sign-on-with-saml-scim-and-azures-entra-id.yml","en-us/blog/how-to-gitlab-single-sign-on-with-saml-scim-and-azures-entra-id",{"_path":2599,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2600,"content":2606,"config":2611,"_id":2613,"_type":14,"title":2614,"_source":16,"_file":2615,"_stem":2616,"_extension":19},"/en-us/blog/how-to-harmonize-agile-sprints-with-product-roadmaps",{"title":2601,"description":2602,"ogTitle":2601,"ogDescription":2602,"noIndex":6,"ogImage":2603,"ogUrl":2604,"ogSiteName":685,"ogType":686,"canonicalUrls":2604,"schema":2605},"How to harmonize Agile sprints with product roadmaps","Apply best practices and GitLab features to your product journey, including creating centralized roadmaps, conducting review sessions, and tracking sprint 
lifecycles.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097231/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%2821%29_2pdp2MNB7SoP4MhhiI1WIa_1750097230664.png","https://about.gitlab.com/blog/how-to-harmonize-agile-sprints-with-product-roadmaps","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"How to harmonize Agile sprints with product roadmaps\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Amanda Rueda\"}],\n        \"datePublished\": \"2025-02-04\",\n      }",{"title":2601,"description":2602,"authors":2607,"heroImage":2603,"date":2608,"body":2609,"category":1053,"tags":2610},[1050],"2025-02-04","Picture this: Product and Development teams are working in isolation. Product has created a 12-month roadmap and communicated it to internal stakeholders but didn't review it with their development team. Dev starts building the features planned for the upcoming sprint without considering the broader product roadmap, leading to missed opportunities to optimize timing, like running projects in parallel, accounting for team capacity, or building reusable APIs that could serve multiple initiatives. The lack of coordination results in inefficiencies and delayed value delivery.\n\nBalancing short-term wins with long-term vision isn’t easy; it requires clear communication, aligned priorities, and the right tools. In this guide, you'll learn strategies to help harmonize your Agile sprints with strategic roadmaps, tackle common challenges, and uncover actionable solutions tailored to your teams.\n\n## The importance of a single source of truth\n\nA consistent single source of truth for roadmaps with longer-range goals ensures you and your teams have access to up-to-date information about the bigger picture. 
In practice, this means maintaining a single, regularly updated platform where all roadmap details reside rather than keeping versions of the roadmap across multiple formats, each typically with slightly different information, causing a misaligned understanding of where you're headed.\n\n### Create a centralized roadmap\n\nBy creating a centralized roadmap for your team, you can:\n\n* communicate long-range strategy\n* minimize miscommunication\n* facilitate cross-functional alignment\n* quickly adapt to changes without losing context\n* self-serve information, reducing dependency on a single point of contact who retains the information\n\n***GitLab tip**: Use [epics](https://docs.gitlab.com/ee/user/group/epics/) and [Roadmap view](https://docs.gitlab.com/ee/user/group/roadmap/) to support both product planning and the transparent monitoring of delivery. The Roadmap view allows you to track progress, identify bottlenecks, and ensure alignment between high-level goals and sprint-level execution.*\n\n![Roadmap view for group](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097239/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097239117.png)\n\n## Collaborative roadmap review practices\n\nEstablish a regular review and sign-off process for roadmap updates that include Product, Engineering, and UX as part of the [product trio](https://www.producttalk.org/product-trio/). Collaborative reviews help you maintain alignment and minimize risk. At GitLab, I meet with my engineering manager and UX designer monthly to review and obtain sign-offs on any changes. 
We maintain a running sign-off on the roadmap wiki page itself that holds us accountable for keeping the schedule and provides transparency to the rest of the organization.\n\n#### How to extract value from review sessions\n\nTo make the most of the review session, aim for the following best practices:\n\n* Schedule routine reviews, monthly or quarterly, depending on how frequently the roadmap tends to fluctuate at your organization.\n* Validate alignment between product goals, UX lead time, and technical feasibility by discussing potential risks and dependencies upfront.\n  * Validate that the roadmap reflects current organizational business objectives.\n  * Ensure that design timelines are realistic and consider research or validation needs.\n  * Confirm that the roadmap allocates time for technical preparation, such as technical spikes or investigations, and ensures alignment with broader engineering priorities.\n* Optimize team utilization by considering capacity constraints and ensuring the sequence of work aligns with the team’s skill profile. This includes avoiding periods of underutilization or skill mismatches while effectively planning for situations like staffing level drops during holidays.\n* Right-size scope and set appropriate expectations about what can be achieved. We all want to do it all, but perfection is the enemy of progress so prioritize what truly matters to deliver incremental value efficiently. Seek opportunities to optimize by identifying ways to iterate or increase velocity, such as adjusting the order of work to reduce dependencies or leveraging reusable components to streamline development.\n* Encourage open dialogue about trade-offs and priorities to ensure all perspectives are considered. 
This collaborative approach helps identify creative solutions to challenges and builds consensus on the best path forward.\n\n***GitLab tip**: Use a [GitLab Wiki](https://docs.gitlab.com/ee/user/project/wiki/) page to complement the [Roadmap](https://docs.gitlab.com/ee/user/group/roadmap/) feature. In the wiki, you can include expanded context about your product roadmap, such as business rationale, links to user research, RICE scores, and details about dependencies or risks. Link directly to the roadmap for easy access, and leverage the upcoming discussion threads feature to encourage async collaboration and feedback from your team.*\n\n![PlanFlow product roadmap](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097239/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097239118.png)\n\n## Continuous direction validation and progress measurement\n\nThe goal of a product roadmap isn’t just to stay on track – it’s to deliver real value to your customers. To make space for sharing ongoing user feedback and behavioral data consider incorporating regular touchpoints across your product trio outside of sprint cycles. These sessions can be used to review insights, analyze trends, and ensure that the product roadmap continues to reflect the evolving needs of your users. By grounding roadmap updates using real user insights, you’re not only delivering on outcomes but also adapting to what really matters to your customers.\n\nThe value you ship might come in the form of improved usability, reduced technical debt, or entirely new capabilities. When the product trio is aligned on the roadmap vision, they’re also aligned on the outcomes you’re working to achieve.\n\nTo measure whether you’re on track to deliver those outcomes, you need to closely scope the intended results. Scope creep, like late user story additions, can delay your ability to ship value. 
Additionally, it’s important to identify work that was delivered but doesn’t align with the roadmap and understand why.\n\n### Sprint planning\n\nRemaining aligned with your product roadmap starts with thoughtful sprint planning. Here are some best practices to keep your team on track and focused on delivering value:\n\n* Clearly define, and narrowly scope, desired outcomes to ensure high confidence in delivery.\n* Identify potential late additions or adjustments that could delay delivery, and build in buffers to maintain focus.\n* Align on the sequence of work with your team to optimize for capacity, skill profiles, and reducing dependencies.\n* To maintain focus and improve confidence of delivering on time, avoid planning to 100% of the team’s capacity. Leave room (10%-20%) for unknowns or new discoveries that may surface during the sprint.\n\n### During the sprint\n\nStaying aligned with your roadmap during the sprint requires focus, communication, and constant evaluation. While delivering value is the goal, it’s equally important to ensure the work in progress aligns with the outcomes you’ve scoped and planned.\n\n* Continuously validate the work in progress against roadmap outcomes to ensure every sprint contributes to the bigger picture.\n* Encourage the team to regularly check if they’re still working toward the intended goals and outcomes.\n* Maintain open communication throughout the sprint. Use daily standups or async updates to surface risks, unplanned work, or dependencies early and adjust where necessary.\n* Be ruthless about protecting the sprint. While the urge to solve emerging problems is natural, unplanned work should be carefully evaluated to avoid derailing agreed-upon priorities.\n* Proactively manage scope creep. If new work surfaces mid-sprint, assess whether it aligns with the current roadmap outcome’s narrowly scoped focus. 
While additional ideas or features may align conceptually with the broader outcome, they may not fit into the immediate plan to deliver value as soon as possible. Document these suggestions and evaluate if they should be considered as part of future iterations or as a nice-to-have for the future, rather than introducing them into the current sprint and delaying agreed-upon priorities.\n\n### Sprint retros\n\nIn your sprint retrospectives, take time to reflect with your team on how well you are collectively progressing toward your desired outcomes. Questions to ask:\n\n* Did any unplanned work get introduced during the sprint that delayed your ability to deliver value? Identify why it happened and what adjustments can be made.\n* Did you deliver any work that deviated from the roadmap? Discuss what led to this and what you can learn for future planning.\n\nFrom sprint planning through retrospectives, staying focused on delivering tangible outcomes to users and stakeholders is a team responsibility. By aligning every step of the way, you ensure that your roadmap remains a clear guide for delivering value efficiently and consistently.\n\n***GitLab tip:** Use [burndown charts](https://docs.gitlab.com/ee/user/project/milestones/burndown_and_burnup_charts.html) to visualize progress and detect deviations early, helping your team stay focused on delivering outcomes.*\n\n![Burndown chart](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097239/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097239120.png)\n\n## Delivering roadmap outcomes with confidence\n\nHarmonizing Agile sprints with strategic roadmaps requires intentionality, team buy-in, and the proper tools. By creating a roadmap single source of truth, fostering collaborative reviews, and measuring progress towards outcomes, you can align execution with vision. 
With GitLab’s robust planning features, teams can turn challenges into opportunities for innovation and growth.\n\nReady to align your sprints with your strategic roadmap? [Start a free trial of GitLab](https://about.gitlab.com/free-trial/) today and explore the tools that can help you deliver outcomes with confidence.\n\n## Learn more\n\n- [Agile planning content hub](https://about.gitlab.com/blog/categories/agile-planning/)\n- [GitLab’s new Planner role for Agile planning teams](https://about.gitlab.com/blog/introducing-gitlabs-new-planner-role-for-agile-planning-teams/)\n- [Get to know the GitLab Wiki for effective knowledge management](https://about.gitlab.com/blog/get-to-know-the-gitlab-wiki-for-effective-knowledge-management/)",[908,719,888,9],{"slug":2612,"featured":91,"template":698},"how-to-harmonize-agile-sprints-with-product-roadmaps","content:en-us:blog:how-to-harmonize-agile-sprints-with-product-roadmaps.yml","How To Harmonize Agile Sprints With Product Roadmaps","en-us/blog/how-to-harmonize-agile-sprints-with-product-roadmaps.yml","en-us/blog/how-to-harmonize-agile-sprints-with-product-roadmaps",{"_path":2618,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2619,"content":2625,"config":2630,"_id":2632,"_type":14,"title":2633,"_source":16,"_file":2634,"_stem":2635,"_extension":19},"/en-us/blog/how-to-integrate-custom-security-scanners-into-gitlab",{"title":2620,"description":2621,"ogTitle":2620,"ogDescription":2621,"noIndex":6,"ogImage":2622,"ogUrl":2623,"ogSiteName":685,"ogType":686,"canonicalUrls":2623,"schema":2624},"How to integrate custom security scanners into GitLab","Learn how to extend the DevSecOps platform by adding custom security scanners to your workflows (includes an easy-to-follow 
tutorial).","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097082/Blog/Hero%20Images/Blog/Hero%20Images/securitycheck_securitycheck.png_1750097081856.png","https://about.gitlab.com/blog/how-to-integrate-custom-security-scanners-into-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"How to integrate custom security scanners into GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Fernando Diaz\"}],\n        \"datePublished\": \"2024-02-27\",\n      }",{"title":2620,"description":2621,"authors":2626,"heroImage":2622,"date":2627,"body":2628,"category":693,"tags":2629},[1599],"2024-02-27","GitLab, the most comprehensive DevSecOps platform, has everything you need to plan, manage, build, deploy, secure, govern, and monitor your applications. However, there are instances where you may want to extend GitLab with third-party or custom tools. For example, you might need to migrate to a DevSecOps platform from separate solutions, evaluate third-party tools, or integrate proprietary or custom-built solutions into GitLab.\n\nHere's what is covered:\n- [GitLab DevSecOps platform extensibility](#gitlab-devsecops-platform-extensibility)\n- [GitLab security scanner integration](#gitlab-security-scanner-integration)\n  - [Merge request security widget](#merge-request-security-widget)\n  - [Pipeline Security section](#pipeline-security-section)\n  - [Vulnerability Report](#vulnerability-report)\n  - [Vulnerability pages](#vulnerability-pages)\n  - [Security dashboard](#security-dashboard)\n  - [Scan Result Policy integration](#scan-result-policy-integration)\n- [Tutorial: Integrating custom security scanners](#tutorial-integrating-custom-security-scanners)\n  - [Creating a custom security scanner](#creating-a-custom-security-scanner)\n  - [Integrating a custom security scanner with GitLab](#integrating-a-custom-security-scanner-with-gitlab)\n\n## GitLab DevSecOps 
platform extensibility\n\nGitLab can be extended in many ways to support enhanced functionality that your organization may require. Some common examples of these integrations include:\n\n- external application integrations such as Jenkins and Slack\n- external issue tracking integrations such as Bugzilla and Jira\n- external authentication provider integrations such as LDAP and SAML\n- external security scanner integrations such as Fortify and Checkmarx\n- ability to respond to leaked secrets such as AWS and GCP access keys\n\nYou can see all the available integrations in the [Integrate with GitLab documentation](https://docs.gitlab.com/ee/integration/). (Note: Not all integrations are listed in the documentation.)\n\n## GitLab security scanner integration\n\n[Third-party security scanners](https://docs.gitlab.com/ee/integration/#security-improvements) or [custom-built security scanners](https://gitlab.com/gitlab-de/tutorials/security-and-governance/custom-scanner-integration) can be integrated into GitLab to populate the merge request widget, Pipeline Security section, Vulnerability Report, vulnerability pages, Security dashboard, and Scan Result Policies. 
Let's review each integration.\n\n### Merge request security widget\n\nA merge request contains a security widget which displays a summary of the newly detected vulnerabilities.\n\n![integrating security scanners - image 1](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097089/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750097088837.png)\n\n\u003Ccenter>\u003Ci>Merge request security widget\u003C/i>\u003C/center>\n\u003Cp>\u003C/p>\n\nWhen you click on a vulnerability, you will see a popup that contains the following information:\n- status\n- description\n- project\n- file\n- identifiers\n- severity\n- tool\n- scanner provider\n\n![integrating security scanners - image 2](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097089/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097088838.png)\n\n\u003Ccenter>\u003Ci>Actionable vulnerabilities with details\u003C/i>\u003C/center>\n\n\u003Cp>\u003C/p>\n\nThese vulnerabilities are also actionable, which means that they can either be dismissed or a confidential issue can be created.\n\nThe results of a custom scanner can be used to populate the security widget. The vulnerability data is populated from the JSON schema the scanner emits.\n\n### Pipeline Security section\n\nAll enabled security analyzers run in the pipeline and output their results as artifacts. These artifacts are processed, including deduplication, and the results are listed on the Pipeline Security tab. From here, you can also download the resulting JSON files.\n\n![integrating security scanners - image 3](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097089/Blog/Content%20Images/Blog/Content%20Images/image11_aHR0cHM6_1750097088840.png)\n\n\u003Ccenter>\u003Ci>Pipeline Security tab\u003C/i>\u003C/center>\n\u003Cp>\u003C/p>\n\nThe results of a custom scanner can be used to populate the Pipeline Security tab. 
The columns are filled in using the JSON schema the scanner emits.\n\n### Vulnerability Report\n\nThe Vulnerability Report provides information about vulnerabilities from scans of the default branch, including:\n\n- totals of vulnerabilities per severity level\n- filters for common vulnerability attributes\n- details of each vulnerability, presented in tabular layout\n\n![integrating security scanners - image 4](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097089/Blog/Content%20Images/Blog/Content%20Images/image8_aHR0cHM6_1750097088842.png)\n\n\u003Ccenter>\u003Ci>Vulnerability Report\u003C/i>\u003C/center>\n\u003Cp>\u003C/p>\n\nThe results of a custom scanner on the default branch can be used to populate the Vulnerability Report.\n\n### Vulnerability pages\n\nClicking on a vulnerability present within the Vulnerability Report takes you to its vulnerability page. Each vulnerability in a project has a vulnerability page that provides details such as:\n\n- description\n- when it was detected\n- current status\n- location detected\n- available actions\n- linked issues\n- actions log\n- solutions\n- identifier\n- training\n\nYou can use the data provided in the vulnerability page to triage a detected vulnerability as well as assist in its remediation.\n\n![integrating security scanners - image 5](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097089/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750097088844.png)\n\n\u003Ccenter>\u003Ci>Vulnerability page for secret detection vulnerability\u003C/i>\u003C/center>\n\u003Cp>\u003C/p>\n\nThe results of a custom scanner can be used to populate the vulnerability page. The vulnerability data is populated from the JSON schema the scanner emits.\n\n### Security dashboard\n\nSecurity dashboards are used to assess the security posture of your applications. 
GitLab provides you with a collection of metrics, ratings, and charts for the vulnerabilities detected by the security scanners run on your project. The security dashboard provides data such as:\n\n- vulnerability trends over a 30-, 60-, or 90-day timeframe for all projects in a group\n- a letter grade rating for each project based on vulnerability severity\n- the total number of vulnerabilities detected within the last 365 days and their severity levels\n\n![integrating security scanners - image 6](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097089/Blog/Content%20Images/Blog/Content%20Images/image7_aHR0cHM6_1750097088846.png)\n\n\u003Ccenter>\u003Ci>Group-level Security dashboard\u003C/i>\u003C/center>\n\u003Cp>\u003C/p>\n\nFrom the group-level Security dashboard you can click on a project to access its specific Security dashboard, which provides the 365-day view.\n\n![integrating security scanners - image 7](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097089/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097088847.png)\n\n\u003Ccenter>\u003Ci>Project-level Security dashboard\u003C/i>\u003C/center>\n\u003Cp>\u003C/p>\n\n### Scan Result Policy integration\n\nScan Result Policies are used to require approval based on the findings of one or more security scan jobs. This can prevent insecure code from being merged to production. Scan Result Policies are evaluated after a CI scanning job is fully executed, where policies are evaluated based on the job artifact reports that are published in the completed pipeline.\n\nFor example, you can create a Scan Result Policy that requires approval from project maintainers if a secret detection scanner finds any vulnerabilities. Here's how:\n\n1. On the left sidebar, select **Search or go to** and search for the project you wish to add a policy to.\n2. On the project left sidebar, go to **Secure > Policies**\n3. Select **New policy**\n4. 
In the **Scan result policy** section, select **Select policy**.\n5. Complete the fields:\n- Name: The name of the Policy\n- Description: The description of the Policy\n- Policy status: Whether it is enabled or not\n- Rules: The conditions that must be met for an action (require approval) to take place\n\n![integrating security scanners - image 8](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097089/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097088849.png)\n\u003Ccenter>\u003Ci>Scan Result Policy rules\u003C/i>\u003C/center>\n\u003Cp>\u003C/p>\n\n- Actions: The action to be taken whenever the conditions in the rules (defined vulnerabilities/licenses detected) are met\n\n![integrating security scanners - image 9](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097089/Blog/Content%20Images/Blog/Content%20Images/image9_aHR0cHM6_1750097088850.png)\n\n\u003Ccenter>\u003Ci>Scan Result Policy actions\u003C/i>\u003C/center>\n\u003Cp>\u003C/p>\n\n- Override project approval settings: If selected, the following choices will overwrite project settings but only affect the branches selected in the policy\n\n![integrating security scanners - image 11](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097089/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750097088851.png)\n\n \u003Ccenter>\u003Ci>Scan Result Policy approval settings\u003C/i>\u003C/center>\n \u003Cp>\u003C/p>\n\n6. Press the \"Configure with a merge request\" button.\n\nOnce the Scan Result Policy has been merged, whenever you create a merge request and the criteria defined in the rules are met, then the defined action will be triggered. 
In this case, at least one approval will be required from a maintainer before the code can be merged.\n\n![integrated security scanner - image 10](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097089/Blog/Content%20Images/Blog/Content%20Images/image10_aHR0cHM6_1750097088852.png)\n\n\u003Ccenter>\u003Ci>Blocked merge request due to detected vulnerabilities\u003C/i>\u003C/center>\n\u003Cp>\u003C/p>\n\nThe results of a custom scanner can be fully integrated with Scan Result Policies. If the custom scanner detects a vulnerability, then approval will be required before the code can be merged. The scanner you select in a Scan Result Policy must be leveraging the appropriate JSON schema.\n\n## Tutorial: Integrating custom security scanners\n\nNow let’s get to the fun part – integrating a custom security scanner. In this tutorial, you will learn how to create a custom security scanner, as well as how to integrate it with GitLab. We will be leveraging the following projects:\n\n- [Fern Pattern Scanner](https://gitlab.com/gitlab-de/tutorials/security-and-governance/custom-scanner-integration/fern-pattern-scanner): Scans your files looking for specific patterns such as passwords, private keys, and social security numbers.\n- [Secret list](https://gitlab.com/gitlab-de/tutorials/security-and-governance/custom-scanner-integration/secret-list): Contains a list of user passwords, clients, and keys. This project is used to showcase how a custom security scanner can be integrated into GitLab.\n\nYou can watch the following video to see how the application was created and how it is used in detail:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/timMbl5SP-w?si=R2DKtZ5MmBR1rQFL\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n### Creating a custom security scanner\n\nNow let’s create a custom scanner that can be integrated into GitLab. 
Before a custom scanner can be fully integrated with GitLab, the scanner must:\n- scan a directory for defined patterns\n- emit a JSON following the appropriate schema\n- be containerized and accessible\n- provide a template to allow it to be run on another project\n\nWhen the [Fern Pattern scanner](https://gitlab.com/gitlab-de/tutorials/security-and-governance/custom-scanner-integration/fern-pattern-scanner) is run on a project using the provided template, it performs the following steps:\n1. Loads a set of rules which define patterns (regex) to detect.\n- Allow rules to be configurable to meet the changing needs of your organization.\n2. Scans files for defined patterns.\n3. Emits a JSON report following the Secret Detection schema.\n- Go templates are used in this project to create a JSON.\n- Depending on what your scanner will look for, make sure you use the appropriate schema.\n\nOnce the JSON report is loaded as an artifact into GitLab, it will populate the merge request widget, Vulnerability Report, vulnerability pages, Scan Result Policies, and Security dashboards as defined above.\n\n### Integrating a custom security scanner with GitLab\n\nOnce you have created your custom scanner that meets all the needs for integration, you can run it on GitLab.\n\nRunning a custom scanner is as easy as adding a template. We can see how the Fern Pattern scanner template is loaded by examining the `.gitlab-ci.yml` in the [Secret List](https://gitlab.com/gitlab-da/tutorials/security-and-governance/custom-scanner-integration/secret-list) project.\n\n1. Create a [.gitlab-ci.yml file](https://docs.gitlab.com/ee/ci/quick_start/#create-a-gitlab-ciyml-file) in the project you want the scanner to run on.\n2. Include the [Custom Scanner template](https://docs.gitlab.com/ee/ci/yaml/includes.html).\n    - You should also be able to configure the template with environment variables.\n3. 
Commit the file to the main branch.\n\nOnce the file has been committed, you can see that the custom scanner will run in your pipeline. Once the pipeline is complete, the scanner will populate all the areas defined above in the [GitLab security scanner integration](#gitlab-security-scanner-integration) section.\n\n## Read more\n\nCheck out these resources to learn more about GitLab and the other ways you can extend your DevSecOps platform:\n\n- [Security Scanner GitLab Integration](https://docs.gitlab.com/ee/development/integrations/secure.html)\n- [GitLab Partner Integrations](https://docs.gitlab.com/ee/integration/)\n- [Custom Security Scanner Projects Group](https://gitlab.com/gitlab-de/tutorials/security-and-governance/custom-scanner-integration)\n- [Automatic Response to a Secret Leak](https://docs.gitlab.com/ee/user/application_security/secret_detection/automatic_response.html)\n",[719,693,972,9],{"slug":2631,"featured":91,"template":698},"how-to-integrate-custom-security-scanners-into-gitlab","content:en-us:blog:how-to-integrate-custom-security-scanners-into-gitlab.yml","How To Integrate Custom Security Scanners Into Gitlab","en-us/blog/how-to-integrate-custom-security-scanners-into-gitlab.yml","en-us/blog/how-to-integrate-custom-security-scanners-into-gitlab",{"_path":2637,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2638,"content":2644,"config":2649,"_id":2651,"_type":14,"title":2652,"_source":16,"_file":2653,"_stem":2654,"_extension":19},"/en-us/blog/how-to-leverage-gitlab-duo-for-enhanced-security-reporting",{"title":2639,"description":2640,"ogTitle":2639,"ogDescription":2640,"noIndex":6,"ogImage":2641,"ogUrl":2642,"ogSiteName":685,"ogType":686,"canonicalUrls":2642,"schema":2643},"How to leverage GitLab Duo for enhanced security reporting","Learn how GitLab Duo enables efficient, real-world security reporting for development, operations, and security 
teams.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098339/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%285%29_1iy516k40hwBDChKcUJ2zb_1750098339103.png","https://about.gitlab.com/blog/how-to-leverage-gitlab-duo-for-enhanced-security-reporting","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"How to leverage GitLab Duo for enhanced security reporting\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Valentine Mairet\"},{\"@type\":\"Person\",\"name\":\"David O'Regan\"}],\n        \"datePublished\": \"2024-12-03\",\n      }",{"title":2639,"description":2640,"authors":2645,"heroImage":2641,"date":2178,"body":2647,"category":801,"tags":2648},[2646,1399],"Valentine Mairet","Good security reporting is crucial to maintain a good security posture because it provides detailed insights into incidents. With this information, organizations can better understand vulnerabilities, improve defenses, and prevent similar threats in the future. At GitLab, the [Security division](https://handbook.gitlab.com/handbook/security/#division-structure) has created use cases for GitLab Duo to improve reporting capabilities and enhance operational efficiency. \n\n## GitLab Duo’s security capabilities\n\nThe GitLab Security division uses GitLab’s built-in [incidents](https://docs.gitlab.com/ee/operations/incident_management/incidents.html) to manage and report on security incidents. Incidents are handled, documented, and resolved in GitLab, enabling the use of AI-driven [GitLab Duo](https://about.gitlab.com/gitlab-duo/) as an assistant when performing security operations like incident response. 
\n\nParticularly in incident analysis and reporting, GitLab Duo is highly efficient and accurate at creating proper documentation and is a great “pair programmer” when solving security incidents.\n\n## GitLab Duo features for security reporting\n\nGitLab Duo offers many features that enhance security reporting:\n\n- **Root Cause Analysis:** GitLab Duo can explain vulnerabilities and understand the context of an incident issue, making it an excellent assistant for performing root cause analyses of security incidents.\n- **Vulnerability Explanation:** Provides detailed insights into identified vulnerabilities, including potential exploitation methods and remediation steps. This feature aids developers and security analysts in understanding and addressing security issues effectively.\n- **Vulnerability Resolution:** Assists in fixing vulnerabilities by generating merge requests that address the identified issues, streamlining the remediation process.\n- **Code Explanation:** Helps users comprehend specific code segments by offering clear explanations, which is particularly useful when dealing with complex or unfamiliar codebases.\n- **Test Generation:** Facilitates early bug detection by generating tests for selected code, ensuring that security vulnerabilities are identified and addressed promptly.\n- **Refactor Code:** Suggests improvements or refactoring for selected code to enhance its quality and maintainability, contributing to a more secure codebase.\n- **Fix Code:** Identifies and rectifies quality issues such as bugs or typos in the selected code, helping maintain a robust and secure codebase.\n\n## Practical use cases\n\nFor the purpose of demonstrating practical use cases, the Security Incident Response Team created a dummy incident with following limited information:\n\n![Incident report](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098346/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750098346297.png)\n\nSeveral comments 
were added as the team would normally proceed:\n\n![Comments added to report](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098346/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098346297.png)\n\n### Incident reporting\n\nGitLab Duo is able to comprehensively keep track of all information inside an incident issue, including the issue description, comments, and labels. When handling security incidents, information often is all over the place and can change over time. It can easily get lost or overlooked. GitLab Duo is excellent at finding relevant information again to create accurate incident reports. \n\nNavigate to your incident issue and open GitLab Duo Chat. You can engineer your prompt so that GitLab Duo takes your exact reporting requirements into account such as what sections you need and how they should be filled out. Here is an example of the prompt we use at GitLab Security:\n\n> Required sections:\n> - Executive Summary - bottom-line-up-front that is adequate for an audience like senior leadership and CISO\n> - Mitigations & Remediations\n> - Scope & Impact (Environments, customers, team members)\n> - Cause\n> - Detection Capabilities\n>\n> Based on the issue and the required section, write the security incident report. Flag any missing information. \n\nBelow is GitLab Duo’s output that we can use to create our extensive incident report:\n\n\u003Cimg src=\"https://res.cloudinary.com/about-gitlab-com/image/upload/v1752175975/Blog/zxiafech08ck9gqjubaz.png\" alt=\"root cause analysis output\" width=\"400\" height=\"1200\">\n\u003Cimg src=\"https://res.cloudinary.com/about-gitlab-com/image/upload/v1752175985/Blog/xjc9coah4oi7rz0daw2b.png\" alt=\"root cause analysis output\" width=\"400\" height=\"1200\">\n\n### Root Cause Analysis\n\nGitLab Duo is able to explain vulnerabilities and can understand the context of an incident issue. 
These capabilities make GitLab Duo an excellent assistant to perform root cause analyses of security incidents. \n\nAs seen above, GitLab Duo can write its own section on the cause of an incident based entirely on the issue contents. We can dig into this further with the assistance of GitLab Duo to identify several candidates for root cause(s).  \n\nOur prompt is built as such:\n\n> “The root cause of the incident was the exposure of a personal access token in CI job logs. The token was leaked in a project that was not included in the current list of projects for token detection, indicating a gap in the security monitoring process.”\n> Name a few potential root causes for a PAT finding itself in CI job logs.\n\nGitLab Duo’s response is the following:\n\n![GitLab Duo Root Cause Analysis response](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098346/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750098346298.png)\n\nGitLab Duo Root Cause Analysis can help security teams identify avenues worth investigating. It has a pretty good grasp on security concepts and how Root Cause Analysis is led in general, so it can be a great assistant for these situations where the exact root of the problem is unclear. \n\n> Learn more about [GitLab Duo Root Cause Analysis](https://about.gitlab.com/blog/developing-gitlab-duo-blending-ai-and-root-cause-analysis-to-fix-ci-cd/).\n\n### On-demand updates\n\nSecurity incidents move fast and can be confusing, and information is constantly changing and shifting around. Higher severity incidents require a certain degree of thoroughness when it comes to reporting to senior leadership, especially when the audience is at the Director+ level. Engineers who are in the midst of a security incident can find it difficult to condense the information such that it is digestible for executives. \n\nWe saw above that GitLab Duo is capable of delivering a pretty good executive summary. 
When the incident is ongoing, we need to deliver regular updates to senior leadership on the incident status and next steps. GitLab Duo is a great help for that, as well. If information is scattered across the issue in the form of a description or comments, GitLab Duo can help reassemble this information into the “bottom-line-up-front,” or BLUF summary, we need for executive updates. \n\nWe’ve taken the same incident right before token revocation and asked GitLab Duo for a BLUF summary where the audience is the Director of Security Operations. \n\n![Executive Summary - GitLab Duo](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098346/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750098346299.png)\n\n## Getting started with GitLab Duo for security\n\nGitLab Security has automated several parts of the reporting process with the help of GitLab Duo. But to get started, all you need is access to GitLab Duo Chat. GitLab Duo Chat can be your well-informed assistant for many security reporting cases and post-mortem analyses.\n\n## What’s next for GitLab Duo?\n\nGitLab is committed to continuously enhancing GitLab Duo’s capabilities. Future developments aim to integrate AI-driven features more deeply into the security workflow, providing proactive detection and resolution of vulnerabilities, streamlined incident management, and comprehensive reporting tools. 
These advancements will further empower security teams to maintain robust security postures and respond effectively to emerging threats.\n\n> [Try GitLab Duo for free](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/)!\n",[803,719,693,695,9],{"slug":2650,"featured":6,"template":698},"how-to-leverage-gitlab-duo-for-enhanced-security-reporting","content:en-us:blog:how-to-leverage-gitlab-duo-for-enhanced-security-reporting.yml","How To Leverage Gitlab Duo For Enhanced Security Reporting","en-us/blog/how-to-leverage-gitlab-duo-for-enhanced-security-reporting.yml","en-us/blog/how-to-leverage-gitlab-duo-for-enhanced-security-reporting",{"_path":2656,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2657,"content":2663,"config":2669,"_id":2671,"_type":14,"title":2672,"_source":16,"_file":2673,"_stem":2674,"_extension":19},"/en-us/blog/how-to-scan-a-full-commit-history-to-detect-sensitive-secrets",{"title":2658,"description":2659,"ogTitle":2658,"ogDescription":2659,"noIndex":6,"ogImage":2660,"ogUrl":2661,"ogSiteName":685,"ogType":686,"canonicalUrls":2661,"schema":2662},"How to scan a full commit history to detect sensitive secrets","Use GitLab Secret Detection to scan a repository's commit history, including branches. 
View results within the GitLab UI with just a few lines of code added to a pipeline file.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097948/Blog/Hero%20Images/Blog/Hero%20Images/REFERENCE%20-%20display%20preview%20for%20blog%20images%20%281%29_2XDPsbkjQ3o6tcdom6IGxI_1750097948673.png","https://about.gitlab.com/blog/how-to-scan-a-full-commit-history-to-detect-sensitive-secrets","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"How to scan a full commit history to detect sensitive secrets\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Noah Ing\"},{\"@type\":\"Person\",\"name\":\"Jerez Solis\"}],\n        \"datePublished\": \"2025-02-06\",\n      }",{"title":2658,"description":2659,"authors":2664,"heroImage":2660,"date":2666,"body":2667,"category":693,"tags":2668},[1357,2665],"Jerez Solis","2025-02-06","Secrets left exposed in outdated repositories pose significant risk for data\nbreaches. For example, a still-active secret key can be exposed, leaving it\nvulnerable to exploitation. Secrets include access keys, API tokens, private\nkeys, and other sensitive values. \n\n\nIn this article, you'll learn how to use GitLab Secret Detection to scan a\nrepository’s full commit history, including all branches, to detect\nsensitive secrets. In addition, you will discover how to view the results\ndirectly within the GitLab UI without the need for any integration. All it\ntakes is just a couple of lines of code in your `.gitlab-ci.yml` pipeline\nfile. \n\n\n## Scan every corner of your repository\n\n\nWe will use the sample repository shown in the screenshot below as an\nexample. To keep things simple, there is only a `README.md` file present in\nthe default branch of this repository. 
\n\n\n![Sample repository to\nscan](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097956/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097955851.png)\n\n\nAt first glance, it may seem like the repository is empty and that there are\nprobably no sensitive secrets in this repository. But what we are looking at\nis only the state of the default branch, which is the main branch in this\nexample. There could be feature branches in this repository created weeks,\nmonths, or years ago with sensitive secrets. It is also possible that a file\nwith a secret was accidentally pushed to the repo and then deleted right\nafter. However, it likely was not deleted correctly and is still in the\ncommit history.\n\n\nWe are going to enable GitLab Secret Detection scanner and set the\n`SECRET_DETECTION_HISTORIC_SCAN` variable to **true** so that the content of\nall branches in the repository is scanned.\n\n\n![Enable GitLab Secret Detection variable to\ntrue](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097956/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097955853.png)\n\n\n```\n\ninclude:\n  - template: Jobs/Secret-Detection.gitlab-ci.yml\nsecret_detection:\n  variables:\n    SECRET_DETECTION_HISTORIC_SCAN: \"true\"\n```\n\n\nBy setting the `SECRET_DETECTION_HISTORIC_SCAN` variable to **true**, GitLab\nSecret Detection looks into every branch and commit of your repository. It\nensures that no sensitive information — whether from a feature branch or an\nold commit — is left unchecked.\n\n\n## Results of the scan\n\n\nTwo sensitive secrets were identified in the repository. One is a password\nin a `.env` file that was deleted from the repository, but the commit\ncontaining it was not removed from the git history. The other is an AWS\nAccess Token found in a feature branch. These exposed secrets could\ncompromise the organization’s security. 
\n\n\n![AWS Access Token\nscreen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097956/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097955855.png)\n\n\nYou can click on the AWS Access Token result to see more details, including\nthe file location. You can also create a GitLab issue to triage the\nvulnerability with one click. If you’re using the Jira integration, you can\ncreate a Jira ticket directly from the vulnerability page as well.\n\n\n## Why scanning for secrets matters\n\n\nAnyone with access to the repository can misuse the secret to gain\nunauthorized access to private resources and sensitive data. \n\n\nIn addition to scanning a repository’s full commit history across all\nbranches, GitLab Secret Detection also helps you take a multilayered\napproach to detecting secrets:\n\n\n* [Secret push\nprotection](https://docs.gitlab.com/ee/user/application_security/secret_detection/secret_push_protection/index.html)\n- scans commits for secrets during a push and blocks it if secrets are\ndetected, unless skipped, reducing the risk of leaks.  \n\n* [Pipeline secret\ndetection](https://docs.gitlab.com/ee/user/application_security/secret_detection/pipeline/index.html)\n- scans files after they’ve been committed and pushed to a GitLab\nrepository.\n\n* [Client-side secret\ndetection](https://docs.gitlab.com/ee/user/application_security/secret_detection/client/index.html)\n- scans comments and descriptions in issues and merge requests for secrets\nbefore they're saved to GitLab.  \n\n* [Automatic response to leaked\nsecrets](https://docs.gitlab.com/ee/user/application_security/secret_detection/automatic_response.html)\n- automatically revokes certain types of leaked secrets and notifies the\npartner that issued the secret. \n\n\nYou can adjust pipeline secret detection to suit your needs by modifying,\nextending, or replacing the default ruleset. 
For instance, you can define\n[custom\nrules](https://docs.gitlab.com/ee/user/application_security/secret_detection/pipeline/index.html#customize-analyzer-rulesets)\nusing regex patterns to detect sensitive data like credit card numbers,\nphone numbers, or other information specific to your organization.\n\n\n## Try GitLab Secret Detection\n\n\n1.\n[Enable](https://docs.gitlab.com/ee/user/application_security/secret_detection/pipeline/#enable-the-analyzer)\nSecret Detection in your GitLab pipeline.  \n\n2. Set `SECRET_DETECTION_HISTORIC_SCAN: true`.  \n\n3. Push and trigger a pipeline to scan all branches and commits.\n\n\nGitLab makes securing your code simple and comprehensive. Don’t let an old\nbranch or commit compromise your security — give historical scans a try\ntoday!\n\n\n> #### [Sign up for a free trial of GitLab\nUltimate](https://about.gitlab.com/free-trial/) to get started with security\nscanners like Secret Detection.\n",[930,719,9,695],{"slug":2670,"featured":6,"template":698},"how-to-scan-a-full-commit-history-to-detect-sensitive-secrets","content:en-us:blog:how-to-scan-a-full-commit-history-to-detect-sensitive-secrets.yml","How To Scan A Full Commit History To Detect Sensitive Secrets","en-us/blog/how-to-scan-a-full-commit-history-to-detect-sensitive-secrets.yml","en-us/blog/how-to-scan-a-full-commit-history-to-detect-sensitive-secrets",{"_path":2676,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2677,"content":2683,"config":2688,"_id":2690,"_type":14,"title":2691,"_source":16,"_file":2692,"_stem":2693,"_extension":19},"/en-us/blog/how-to-successfully-deliver-your-software-development-roadmap",{"title":2678,"description":2679,"ogTitle":2678,"ogDescription":2679,"noIndex":6,"ogImage":2680,"ogUrl":2681,"ogSiteName":685,"ogType":686,"canonicalUrls":2681,"schema":2682},"How to successfully deliver your software development roadmap","Here are three common blockers and how to overcome them to fully realize the ROI of a DevSecOps platform 
investment.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749669103/Blog/Hero%20Images/AdobeStock_243118595.jpg","https://about.gitlab.com/blog/how-to-successfully-deliver-your-software-development-roadmap","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"How to successfully deliver your software development roadmap\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"David DeSanto, Chief Product Officer, GitLab\"}],\n        \"datePublished\": \"2024-04-04\",\n      }",{"title":2678,"description":2679,"authors":2684,"heroImage":2680,"date":2685,"body":2686,"category":1277,"tags":2687},[1869],"2024-04-04","2024 is shaping up to be the year of DevSecOps, where more organizations realize the full potential of blending development, security, and operations through the adoption of a comprehensive platform. This is when teams will move beyond using just source code management (SCM) and tap into all the [AI-powered features](https://about.gitlab.com/topics/devops/the-role-of-ai-in-devops/) available across the software development lifecycle (SDLC), delivering better, more secure software faster. But first organizations have to knock down the blockers that can get in the way of successful [DevSecOps](https://about.gitlab.com/topics/devsecops/) adoption.\n\nIn talking to customers at organizations of all sizes, I've heard three main blockers:\n* The potential of AI is believable, but right now it seems limited to code creation and that has limited impact as there is more to the SDLC.\n* A platform seems like a great idea, but forcing my development, security, and operations team to give up their preferred tools all at once will undoubtedly cause a revolt. 
Yet, without everyone on the same platform, the investment is hard to justify.\n* Regulations and compliance make it difficult to leverage a DevSecOps SaaS solution, and being on a multi-tenant solution is a non-starter for us as we are in a highly regulated industry. However, the overhead of self-hosting a DevSecOps platform is becoming untenable at our scale.\n\nWhile legitimate concerns, these blockers can be eliminated by combining DevSecOps practices and a platform approach. \"Making sure that we spend our money wisely is very, very important. GitLab allowed us to reduce our costs and centralize our work in one place. It’s been money well spent,\" says Andy Chow, Technology Chief of Staff at global fintech company Airwallex.\n\nLet's dig deeper into each blocker and see how it is resolved with a DevSecOps platform.\n\n> Discover the future of AI-driven software development with our GitLab 17 virtual launch event. [Watch today!](https://about.gitlab.com/seventeen/)\n\n### AI is not limited: It is having real impact across the SDLC\n\nWe know that AI is already improving the developer experience but there is so much more that AI can do across the entire SDLC. With AI, organizations can unburden development, security, and operations teams from tedious tasks by taking advantage of the efficiencies that AI provides. For instance, users can access summaries of comments in merge requests, have tests generated, refactor sections of code, and perform other time-saving actions.\n\nThat's why with [GitLab Duo](https://about.gitlab.com/gitlab-duo/), our suite of AI-powered workflows, we focus on more than just code creation — after all, [code creation only accounts for 25% of a developer's time](https://about.gitlab.com/blog/gitlab-global-devsecops-ai-report/). There is so much more that happens in the SDLC where AI can add efficiency. 
For instance, development, security, and operations teams that use AI-powered capabilities, such as Vulnerability Remediation and Root Cause Analysis, share that they can find and resolve vulnerabilities earlier and identify CI/CD pipeline failures faster and in a more collaborative manner.\n\n### Forget one-size-fits-all, migrate your way\n\nRealizing the benefit of a DevSecOps platform is not a one-size-fits-all. You can customize your deployment to fit your organization's needs and where you are in your digital transformation journey — choosing one team at a time to adopt the platform or a full cutover. I have advice, though: Commit to using more than just source code management. A DevSecOps platform is a robust solution that includes enterprise agile planning, CI/CD, security and compliance, value stream analytics, and more. Also, make sure that as you deploy your platform, your users agree to get familiar with its range of capabilities — and not still maintain [a complex toolchain](https://about.gitlab.com/blog/battling-toolchain-technical-debt/).\n\nThe way to extract the most ROI and satisfaction from your migration is to show users how to get the functionality they had in their other tools from within the DevSecOps platform. To that end, we've increased our resources to support you. From [in-depth tutorials](https://about.gitlab.com/blog/tags/tutorial/) to [clear reference architectures](https://docs.gitlab.com/ee/administration/reference_architectures/), we have a vast library of content (including videos) for you to draw upon to help your users acclimate to and thrive in the DevSecOps environment. \n\nWe've also made it easier to onboard teams, with capabilities like [remote development environments](https://about.gitlab.com/blog/quick-start-guide-for-gitlab-workspaces/), enabling organizations to reduce adoption friction. 
In fact, as more teams within your organization adopt GitLab, consider expanding access for other critical functions that contribute to delivering software value such as Finance, Legal, and Marketing teams. The power of a DevSecOps platform is giving everyone visibility into the SDLC which drives better collaboration, improves planning, reduces security risk, improves team velocity, and leads to faster time-to-value. This means your teams are happier and so are the users of the applications you build, secure, and deploy using GitLab.\n\n> Read how the U.S. Navy's Black Pearl [sped up onboarding using GitLab](https://about.gitlab.com/blog/u-s-navy-black-pearl-lessons-in-championing-devsecops/).\n\n**Note:** GitLab doesn't have to be introduced to the organization by the development team. For instance, if security teams want vulnerabilities identified and mitigated earlier in the lifecycle or increased compliance via security scanning, they can recommend that developers use the DevSecOps platform. [Read how U.K. retailer Dunelm made this happen](https://about.gitlab.com/customers/dunelm/).\n\n### Multi-tenancy is just one option; single-tenancy can address regulatory requirements\n\nKeeping software up-to-date and secure while maintaining compliance with strict regulations can make self-hosting a challenge. In June 2023, we launched [GitLab Dedicated](https://about.gitlab.com/dedicated/), our single-tenant SaaS solution, into general availability to address the needs of organizations in highly regulated industries like finance and healthcare, and in highly regulated geographies such as the European Union. GitLab Dedicated provides the secure environment organizations need for regulatory compliance, including control over data residency and isolation, while removing the overhead of self-hosting.\n\nDedicated customers are upgraded automatically every month, which means they have all of the benefits without the administration overhead. 
Furthermore, GitLab Dedicated comes with [GitLab Ultimate](https://about.gitlab.com/pricing/ultimate/), enabling organizations to ship secure software faster with built-in compliance visibility and controls as well as advanced security scanning capabilities.\n\n> Learn the [origins of GitLab Dedicated](https://about.gitlab.com/blog/building-gitlab-with-gitlabcom-how-gitlab-inspired-dedicated/) and how it has grown into the solution highly regulated organizations need.\n\n### Try GitLab today\n\nAs you proceed with your software development roadmap for 2024 (and beyond), consider what an AI-powered DevSecOps platform could do for your organization. Also keep an eye on [our Direction page](https://about.gitlab.com/direction/#fy25-rd-investment-themes) to learn about what’s coming next and [our monthly release posts](https://about.gitlab.com/blog/categories/devsecops-platform/) to learn about the latest and greatest available.\n\n> Start your trial of [GitLab Duo Pro](https://about.gitlab.com/gitlab-duo/#free-trial) or [GitLab Ultimate](https://gitlab.com/-/trials/new) for free today.\n",[803,951,9,693],{"slug":2689,"featured":91,"template":698},"how-to-successfully-deliver-your-software-development-roadmap","content:en-us:blog:how-to-successfully-deliver-your-software-development-roadmap.yml","How To Successfully Deliver Your Software Development Roadmap","en-us/blog/how-to-successfully-deliver-your-software-development-roadmap.yml","en-us/blog/how-to-successfully-deliver-your-software-development-roadmap",{"_path":2695,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2696,"content":2702,"config":2707,"_id":2709,"_type":14,"title":2710,"_source":16,"_file":2711,"_stem":2712,"_extension":19},"/en-us/blog/how-to-use-gitlabs-custom-compliance-frameworks-in-your-devsecops",{"title":2697,"description":2698,"ogTitle":2697,"ogDescription":2698,"noIndex":6,"ogImage":2699,"ogUrl":2700,"ogSiteName":685,"ogType":686,"canonicalUrls":2700,"schema":2701},"How to use GitLab's 
Custom Compliance Frameworks in your DevSecOps environment","Explore how new frameworks, along with more than 50 out-of-the-box controls, transform regulatory requirements from burdensome checkboxes to integrated, automated workflow components.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097104/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%284%29_3LZkiDjHLjhqEkvOvBsVKp_1750097104092.png","https://about.gitlab.com/blog/how-to-use-gitlabs-custom-compliance-frameworks-in-your-devsecops","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"How to use GitLab's Custom Compliance Frameworks in your DevSecOps environment\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Fernando Diaz\"}],\n        \"datePublished\": \"2025-04-30\"\n      }",{"title":2697,"description":2698,"authors":2703,"heroImage":2699,"date":2704,"body":2705,"category":693,"tags":2706},[1599],"2025-04-30","Compliance isn't just a checkbox — it's a critical business function that affects everything from operational risk to customer trust. For development teams, balancing compliance requirements with velocity can be particularly challenging. GitLab's [Custom Compliance Frameworks](https://about.gitlab.com/blog/introducing-custom-compliance-frameworks-in-gitlab/) offer a powerful way to integrate compliance verification directly into your development workflow. In this article you'll learn what they are and how to use them for maximum efficiency.\n\n## What are GitLab Custom Compliance Frameworks?\n\nGitLab Custom Compliance Frameworks allow organizations to define, implement, and enforce compliance standards directly within their GitLab instance. 
This feature extends GitLab's built-in compliance capabilities by enabling teams to create customized frameworks that align with specific regulatory requirements, internal policies, or industry standards.\n\nCustom Compliance Frameworks have the following benefits:\n* Reduce manual tracking  \n* Accelerate audit readiness  \n* Enforce compliance controls natively\n\n![Compliance center screenshot with frameworks listed](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097114/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097114254.png)\n\nWith this release, more than 50 out-of-the-box (OOTB) controls are provided (with more coming soon) that can be tailored to your organization's unique compliance needs, including HIPAA in healthcare, GDPR for data privacy, SOC2 for service organizations, or industry-specific regulations. Some examples of OOTB controls include:\n\n* Separation of duties (e.g., at least two approvers and author approved merge request)  \n* Security scanners running (e.g., [SAST](https://docs.gitlab.com/user/application_security/sast/) running and [Dependency Scanning](https://docs.gitlab.com/user/application_security/dependency_scanning/) running)  \n* Authentication/authorization (e.g., project visibility not public and AuthSSO required)  \n* Application configuration (e.g., status checks required and Terraform required)\n\nAdditionally, you can configure external environmental controls using the GitLab API to check the status and details of an external environment.\n\n## Creating a Custom Compliance Framework from scratch\n\nNow that we understand the value, let's explore how to implement Custom Compliance Frameworks in your GitLab environment. We will use this demo application and you can follow along in this video. 
\n\n**Note:** A GitLab Ultimate subscription is required.\n\n\u003C!-- TODO: EMBED_YT_VIDEO -->\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/bSwwv5XeMdQ?si=unDwCltF4vTHT4mB\" title=\"Adhering to compliance requirements with built-in compliance controls\n\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n**Step 1: Define your compliance requirements**\n\nBefore building your custom framework, you need to clearly define your compliance requirements:\n\n1. **Identify applicable regulations:** Determine which regulations and standards apply to your organization (e.g., GDPR, PCI DSS, and HIPAA). \n2. **Map requirements to controls:** Break down each regulation into specific, actionable controls.  \n3. **Prioritize requirements:** Focus on high-risk areas and requirements with the greatest impact.\n\n**Step 2: Create your Custom Compliance Framework**\n\nTo create a custom compliance framework in GitLab:\n\n1. Navigate to your GitLab group's **Secure > Compliance Center** section.  \n2. Press the **New framework** button.  \n3. Select **Create blank framework**.\n\n![Create a custom compliance framework screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097114/Blog/Content%20Images/Blog/Content%20Images/image7_aHR0cHM6_1750097114255.png)\n\n4. Provide a name, description, and color for your framework.\n\n![New compliance framework screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097114/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750097114257.png)\n\n5. Add a requirement to the framework:  \n   a. Scroll down to the **Requirements** tab.\n\n   b. Press the **New requirement** button.\n\n   c. Provide a name and description.  \n   d. Under the **Controls** section, select **Choose a GitLab control**.  \n   e. Select a control from the list (e.g., at least two approvals, SAST running).  
\n   f. Press the **Create requirement** button.\n\n![Create new requirement button](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097114/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097114258.png)\n\n6. Press the **Create framework** button.\n\nThe framework will be created as specified and will now be available to add to projects. Additionally, compliance frameworks can be [imported](https://docs.gitlab.com/user/compliance/compliance_frameworks/) using a JSON with the appropriate schema.\n\n**Step 3: Apply the framework to projects**\n\nOnce your framework is created:\n1. From the Compliance Center, select the **Projects** tab.  \n2. Use the search bar to **Search** or **Filter** results.  \n3. Select the project(s) you wish to apply your framework to.  \n4. Press the **Choose one bulk action** button.  \n5. Select **Apply frameworks to selected projects**.  \n6. Press the **Select frameworks** button.  \n7. Select your framework(s) from the list.  \n8. Press the **Apply** button.\n\n![Compliance center screen with SOC 2 framework dropdown](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097114/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750097114260.png)\n\nThe framework will now be applied to the project, making its requirements visible and trackable.\n\n**Step 4: Monitor and report on compliance**\n\nWith your framework in place, you can now:\n\n1. Use the **Compliance Center** to track compliance status across projects including details and suggested fixes for failed controls.\n2. Generate **compliance reports** for audits and stakeholder reviews.  \n3. Set up **compliance alerts** to notify stakeholders of potential compliance issues. \n4. 
Review **audit events** to get an overview of actions taken on compliance settings.\n\n![Compliance Center screen showing SOC2 test framework](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097114/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750097114263.png)\n\n## Real-world example: Implement a SOC2 compliance framework\n\nSystem and Organization Controls 2, better known as SOC2, is a rigorous auditing standard developed by the American Institute of Certified Public Accountants that assesses a service organization's controls related to security, availability, processing integrity, confidentiality, and privacy. You can read my [Guide to fulfilling SOC 2 security requirements with GitLab](https://about.gitlab.com/blog/guide-to-fulfilling-soc-2-security-requirements-with-gitlab/) to learn more.\n\nNow, let's review a practical example of using a Custom Compliance Framework to verify SOC2 security compliance, which requires:\n\n* implementation of controls to protect against unauthorized access  \n* establishment of procedures for identifying and mitigating risks  \n* setting up systems for detecting and addressing security incidents\n\n**Disclaimer:** This is only an example showcasing some of the controls possible for adhering to SOC2. 
Be sure to consult with your security/compliance team before moving any implementation to production.\n\nA Custom Compliance Framework for SOC2 will look as follows using some GitLab OOTB controls:\n\n* **Name:** SOC2 Security Requirements  \n* **Description:** Adds the security requirements for SOC2 framework compliance  \n* **Requirements:**  \n  * **Implement controls to protect against unauthorized access**  \n    * Auth SSO enabled  \n    * CI/CD job token scope enabled  \n    * Require MFA at org level  \n  * **Establish procedures for identifying and mitigating risks**  \n    * At least two approvals  \n    * Author approved merge request  \n    * Committers approved merge request  \n    * Default branch protected  \n  * **Setting up systems for detecting and addressing security incidents**  \n    * Dependency Scanning running  \n    * SAST running  \n    * DAST running\n\nWhen applied to your project(s), this framework allows you to oversee if/and when they fall out of compliance and what can be done to bring them back into compliance. Note that you can create and apply multiple compliance frameworks to a project(s). For example, you can have one for SOC2 process integrity requirements.\n\n## Implement security policies to ensure compliance requirements are met\n\nAlthough not required, security policies can be applied to projects containing a Custom Compliance Framework. This allows you to assure that certain compliance criteria will be enforced via security policies. For example, you can force security scanners to run on projects that contain a Custom Compliance Framework requiring security scanning. \n\nGitLab provides various different security policies:\n\n* [Scan execution policy](https://docs.gitlab.com/user/application_security/policies/scan_execution_policies/): Enforces security scans, either as part of the pipeline or on a specified schedule.  
\n* [Merge request approval policy](https://docs.gitlab.com/user/application_security/policies/merge_request_approval_policies/): Enforces project-level settings and approval rules based on scan results.  \n* [Pipeline execution policy](https://docs.gitlab.com/user/application_security/policies/pipeline_execution_policies/): Enforces CI/CD jobs as part of project pipelines. \n* [Vulnerability management policy](https://docs.gitlab.com/user/application_security/policies/vulnerability_management_policy/): Automatically resolves vulnerabilities that are no longer detected in the default branch.\n\nLet’s go ahead and force a SAST scanner to run in order to automatically adhere to any requirements that require SAST scanning. To create a security policy and apply it to a project with a particular framework:\n\n1. Navigate to a project that has a Custom Compliance Framework requiring **SAST scanning**. \n2. In the sidebar, select **Secure > Policies**.  \n3. Press the **New policy** button.  \n4. Under **Scan execution policy**, press the **Select policy** button. \n5. Fill in the **Name** and **Description**. \n6. Under **Actions**, select **SAST** as the scan to run.\n\n![Actions screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097114/Blog/Content%20Images/Blog/Content%20Images/image8_aHR0cHM6_1750097114264.png)\n\n7. Under **Conditions**, select the pipeline to be triggered when a pipeline runs for all branches.\n\n![Conditions screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097114/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097114265.png)\n\n8. Press the **Configure with a merge request** button.  \n9. An MR is now created in a separate project containing all the security policies scoped to this project.\n10. Press the **Merge** button.\n\nNow SAST will run for every branch, assuring you are compliant in that area. 
Be sure to review all the different types of security policies and see how they can suit your requirements.\n\n## 5 best practices to follow\n\nTo maximize the value of Custom Compliance Frameworks:\n\n1. **Start small:** Begin with one critical regulation or standard before expanding.  \n2. **Involve key stakeholders:** Include compliance, security, and development teams in framework creation.  \n3. **Automate where possible:** Use GitLab CI/CD to automate compliance checks.  \n4. **Document thoroughly:** Maintain clear documentation of how your framework maps to regulatory requirements.  \n5. **Review regularly:** Update your frameworks as regulations evolve or new requirements emerge.\n\n## Get started today\n\nGitLab Custom Compliance Frameworks represent a significant advancement in DevSecOps by bringing compliance directly into the development workflow. By implementing custom frameworks, organizations can reduce compliance overhead, improve risk management, and accelerate development cycles while maintaining robust compliance with regulatory requirements.\n\nThe ability to define and enforce Custom Compliance Frameworks gives teams the flexibility they need to address their specific regulatory landscape while providing the structure necessary to ensure consistent compliance practices across the organization.\n\nAs regulatory requirements continue to grow in complexity, tools like GitLab Custom Compliance Frameworks will become increasingly essential for organizations looking to balance compliance requirements with development velocity in a sustainable way.\n\n> To try Custom Compliance Frameworks today, sign up for your [free  trial of GitLab Ultimate](https://about.gitlab.com/free-trial/).\n\n## Learn more\n\nVisit these resources to learn more about Custom Compliance Frameworks and how they can benefit your organization:\n\n* [Custom Compliance Frameworks documentation](https://docs.gitlab.com/user/compliance/compliance_center/compliance_status_report/)  
\n* [Custom Compliance Frameworks epic](https://gitlab.com/groups/gitlab-org/-/epics/13295)  \n* [Security Policies documentation](https://docs.gitlab.com/user/application_security/policies/)  \n* [GitLab Security and Compliance solutions](https://about.gitlab.com/solutions/security-compliance/)",[693,719,9,695,717],{"slug":2708,"featured":91,"template":698},"how-to-use-gitlabs-custom-compliance-frameworks-in-your-devsecops","content:en-us:blog:how-to-use-gitlabs-custom-compliance-frameworks-in-your-devsecops.yml","How To Use Gitlabs Custom Compliance Frameworks In Your Devsecops","en-us/blog/how-to-use-gitlabs-custom-compliance-frameworks-in-your-devsecops.yml","en-us/blog/how-to-use-gitlabs-custom-compliance-frameworks-in-your-devsecops",{"_path":2714,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2715,"content":2721,"config":2730,"_id":2732,"_type":14,"title":2733,"_source":16,"_file":2734,"_stem":2735,"_extension":19},"/en-us/blog/how-we-decreased-gitlab-repo-backup-times-from-48-hours-to-41-minutes",{"title":2716,"description":2717,"ogTitle":2716,"ogDescription":2717,"noIndex":6,"ogImage":2718,"ogUrl":2719,"ogSiteName":685,"ogType":686,"canonicalUrls":2719,"schema":2720},"How we decreased GitLab repo backup times from 48 hours to 41 minutes","Learn how we tracked a performance bottleneck to a 15-year-old Git function and fixed it, leading to enhanced efficiency that supports more robust backup strategies and can reduce risk.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097166/Blog/Hero%20Images/Blog/Hero%20Images/REFERENCE%20-%20display%20preview%20for%20blog%20images%20%282%29_2pKf8RsKzAaThmQfqHIaa7_1750097166565.png","https://about.gitlab.com/blog/how-we-decreased-gitlab-repo-backup-times-from-48-hours-to-41-minutes","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"How we decreased GitLab repo backup times from 48 hours to 41 minutes\",\n        
\"author\": [{\"@type\":\"Person\",\"name\":\"Karthik Nayak\"},{\"@type\":\"Person\",\"name\":\"Manuel Kraft\"}],\n        \"datePublished\": \"2025-06-05\"\n      }",{"title":2716,"description":2717,"authors":2722,"heroImage":2718,"date":715,"body":2725,"category":928,"tags":2726},[2723,2724],"Karthik Nayak","Manuel Kraft","Repository backups are a critical component of any robust disaster recovery strategy. However, as repositories grow in size, the process of creating reliable backups becomes increasingly challenging.  Our own [Rails repository](https://gitlab.com/gitlab-org/gitlab) was taking 48 hours to back up — forcing impossible choices between backup frequency and system performance. We wanted to tackle this issue for our customers and for our own users internally. \n\nUltimately, we traced the issue to a 15-year-old Git function with O(N²) complexity and fixed it with an algorithmic change, __reducing backup times exponentially__. The result: lower costs, reduced risk, and backup strategies that actually scale with your codebase.\n\nThis turned out to be a Git scalability issue that affects anyone with large repositories. Here's how we tracked it down and fixed it. \n\n## Backup at scale\n\nFirst, let's look at the problem. As organizations scale their repositories and backups grow more complex, here are some of the challenges they can face:\n\n* **Time-prohibitive backups:** For very large repositories, creating a repository backup could take several hours, which can hinder the ability to schedule regular backups. 
\n* **Resource intensity:** Extended backup processes can consume substantial server resources, potentially impacting other operations.\n* **Backup windows:** Finding adequate maintenance windows for such lengthy processes can be difficult for teams running 24/7 operations.\n* **Increased failure risk:** Long-running processes are more susceptible to interruptions from network issues, server restarts, and system errors, which can force teams to restart the entire very long backup process from scratch.\n* **Race conditions:** Because it takes a long time to create a backup, the repository might have changed a lot during the process, potentially creating an invalid backup or interrupting the backup because objects are no longer available.\n\nThese challenges can lead to compromising on backup frequency or completeness – an unacceptable trade-off when it comes to data protection. Extended backup windows can force customers into workarounds. Some might adopt external tooling, while others might reduce backup frequency, resulting in potential inconsistent data protection strategies across organizations.\n\nNow, let's dig into how we identified a performance bottleneck, found a resolution, and deployed it to help cut backup times.\n\n## The technical challenge\n\nGitLab's repository backup functionality relies on the [`git bundle create`](https://git-scm.com/docs/git-bundle) command, which captures a complete snapshot of a repository, including all objects and references like branches and tags. This bundle serves as a restoration point for recreating the repository in its exact state.\n\nHowever, the implementation of the command suffered from poor scalability related to reference count, creating a performance bottleneck. As repositories accumulated more references, processing time increased exponentially. 
In our largest repositories containing millions of references, backup operations could extend beyond 48 hours.\n\n### Root cause analysis\n\nTo identify the root cause of this performance bottleneck, we analyzed a flame graph of the command during execution.\n\n![Flame graph showing command during execution](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097176/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097176388.jpg)\n\nA flame graph displays the execution path of a command through its stack trace. Each bar corresponds to a function in the code, with the bar's width indicating how much time the command spent executing within that particular function.\n\nWhen examining the flame graph of `git bundle create` running on a repository with 10,000 references, approximately 80% of the execution time is consumed by the `object_array_remove_duplicates()` function. This function was introduced to Git in the [commit b2a6d1c686](https://gitlab.com/gitlab-org/git/-/commit/b2a6d1c686) (bundle: allow the same ref to be given more than once, 2009-01-17).\n\nTo understand this change, it's important to know that `git bundle create` allows users to specify which references to include in the bundle. For complete repository bundles, the `--all` flag packages all references.\n\nThe commit addressed a problem where users providing duplicate references through the command line – such as `git bundle create main.bundle main main` - would create a bundle without properly handling the duplicated main reference. Unbundling this bundle in a Git repository would break, because it tries to write the same ref twice. The code to avoid duplication uses nested `for` loops that iterate through all references to identify duplicates. 
This O(N²) algorithm becomes a significant performance bottleneck in repositories with large reference counts, consuming substantial processing time.\n\n### The fix: From O(N²) to efficient mapping\n\nTo resolve this performance issue, we contributed an upstream fix to Git that replaces the nested loops with a map data structure. Each reference is added to the map, which automatically ensures only a single copy of each reference is retained for processing.\n\nThis change dramatically enhances the performance of `git bundle create` and enables much better scalability in repositories with large reference counts. Benchmark testing on a repository with 100,000 references demonstrates a 6x performance improvement.\n\n```shell\nBenchmark 1: bundle (refcount = 100000, revision = master)\n  Time (mean ± σ): \t14.653 s ±  0.203 s\t[User: 13.940 s, System: 0.762 s]\n  Range (min … max):   14.237 s … 14.920 s\t10 runs\n\nBenchmark 2: bundle (refcount = 100000, revision = HEAD)\n  Time (mean ± σ):  \t2.394 s ±  0.023 s\t[User: 1.684 s, System: 0.798 s]\n  Range (min … max):\t2.364 s …  2.425 s\t10 runs\n\nSummary\n  bundle (refcount = 100000, revision = HEAD) ran\n\t6.12 ± 0.10 times faster than bundle (refcount = 100000, revision = master)\n```\n\nThe patch was accepted and [merged](https://gitlab.com/gitlab-org/git/-/commit/bb74c0abbc31da35be52999569ea481ebd149d1d) into upstream Git. 
At GitLab, we backported this fix to ensure our customers could benefit immediately, without waiting for the next Git release.\n\n## The result: Dramatically decreased backup times\n\nThe performance gains from this improvement have been nothing short of transformative:\n\n* **From 48 hours to 41 minutes:** Creating a backup of our largest repository (`gitlab-org/gitlab`) now takes just 1.4% of the original time.\n* **Consistent performance:** The improvement scales reliably across repository sizes.\n* **Resource efficiency:** We significantly reduced server load during backup operations.\n* **Broader applicability:** While backup creation sees the most dramatic improvement, all bundle-based operations that operate on many references benefit.\n\n## What this means for GitLab customers\n\nFor GitLab customers, this enhancement delivers immediate and tangible benefits on how organizations approach repository backup and disaster recovery planning:\n* **Transformed backup strategies**   \n  * Enterprise teams can establish comprehensive nightly schedules without impacting development workflows or requiring extensive backup windows.   \n  * Backups can now run seamlessly in the background during nightly schedules, instead of needing to be dedicated and lengthy.  \n* **Enhanced business continuity**  \n  * With backup times reduced from days to minutes, organizations significantly minimize their recovery point objectives (RPO). This translates to reduced business risk – in a disaster scenario, you're potentially recovering hours of work instead of days.  \n* **Reduced operational overhead**   \n  * Less server resource consumption and shorter maintenance windows.  \n  * Shorter backup windows mean reduced compute costs, especially in cloud environments, where extended processing time translates directly to higher bills.  \n* **Future-proofed infrastructure**   \n  * Growing repositories no longer force difficult choices between backup frequency and system performance.   
\n  * As your codebase expands, your backup strategy can scale seamlessly alongside it\n\nOrganizations can now implement more robust backup strategies without compromising on performance or completeness. What was once a challenging trade-off has become a straightforward operational practice.\n\nStarting with the [GitLab 18.0](https://about.gitlab.com/releases/2025/05/15/gitlab-18-0-released/) release, all GitLab customers regardless of their license tier can already fully take advantage of these improvements for their [backup](https://docs.gitlab.com/administration/backup_restore/backup_gitlab/) strategy and execution. There is no further change in configuration required.\n\n## What's next\n\nThis breakthrough is part of our ongoing commitment to scalable, enterprise-grade Git infrastructure. While the improvement of 48 hours to 41 minutes for backup creation time represents a significant milestone, we continue to identify and address performance bottlenecks throughout our stack.\n\nWe're particularly proud that this enhancement was contributed upstream to the Git project, benefiting not just GitLab users but the broader Git community. This collaborative approach to development ensures that improvements are thoroughly reviewed, widely tested, and available to all.\n\n> Deep infrastructure work like this is how we approach performance at GitLab. Join the GitLab 18 virtual launch event to see what other fundamental improvements we're shipping. 
[Register today!](https://about.gitlab.com/eighteen/)",[2727,2728,717,2729,9],"git","open source","performance",{"slug":2731,"featured":91,"template":698},"how-we-decreased-gitlab-repo-backup-times-from-48-hours-to-41-minutes","content:en-us:blog:how-we-decreased-gitlab-repo-backup-times-from-48-hours-to-41-minutes.yml","How We Decreased Gitlab Repo Backup Times From 48 Hours To 41 Minutes","en-us/blog/how-we-decreased-gitlab-repo-backup-times-from-48-hours-to-41-minutes.yml","en-us/blog/how-we-decreased-gitlab-repo-backup-times-from-48-hours-to-41-minutes",{"_path":2737,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2738,"content":2744,"config":2750,"_id":2752,"_type":14,"title":2753,"_source":16,"_file":2754,"_stem":2755,"_extension":19},"/en-us/blog/how-we-reduced-mr-review-time-with-value-stream-management",{"title":2739,"description":2740,"ogTitle":2739,"ogDescription":2740,"noIndex":6,"ogImage":2741,"ogUrl":2742,"ogSiteName":685,"ogType":686,"canonicalUrls":2742,"schema":2743},"How we reduced MR review time with Value Stream Management ","The GitLab engineering team leverages VSM to pinpoint bottlenecks in the merge request review process and streamline software delivery. 
See how we do it and what we've learned.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097876/Blog/Hero%20Images/Blog/Hero%20Images/REFERENCE%20-%20display%20preview%20for%20blog%20images%20%282%29_2pKf8RsKzAaThmQfqHIaa7_1750097875817.png","https://about.gitlab.com/blog/how-we-reduced-mr-review-time-with-value-stream-management","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"How we reduced MR review time with Value Stream Management \",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Haim Snir\"}],\n        \"datePublished\": \"2025-02-20\",\n      }",{"title":2739,"description":2740,"authors":2745,"heroImage":2741,"date":2747,"body":2748,"category":928,"tags":2749},[2746],"Haim Snir","2025-02-20","At GitLab, we're passionate about using our own products internally, a.k.a. dogfooding. Dogfooding has led to significant improvements in accelerating our software delivery cycle time for customers. This article spotlights a specific use case where [GitLab Value Stream Management (VSM)](https://about.gitlab.com/solutions/value-stream-management/) has driven significant improvements for our engineering team. You'll learn how VSM helped us tackle two critical challenges: measuring the journey from idea conception to merge request completion, and streamlining our deployment workflows.\n\n## The Challenge: Identifying bottlenecks in MR reviews\n\nDespite having well-defined workflows, one team noticed that MRs were taking longer than expected to be reviewed and merged. The challenge wasn’t just about the delays themselves, but about understanding *where* in the review process these delays were happening and *why*.\n\nOur team’s goal was clear:\n\n- Identify where time was being spent from the initial idea to the final merge of an MR.  \n- Pinpoint specific bottlenecks in the review process.  
\n- Understand how MR size, complexity, or documentation quality affects review time.\n\n## The Approach: Measuring MR review time in GitLab Value Stream Analytics\n\nValue Stream Analytics (VSA) enables organizations to map their entire workflow from idea to delivery, distinguishing between value-adding activities (VA) and non-value-adding activities (NVA) in the process flow. By calculating the ratio of value-added time to total lead time, the team can identify wasteful activities resulting in delays in MR reviews.\n\nTo obtain the necessary metrics, the team customized GitLab VSA to gain better visibility into our MR review process.\n\n### 1. Setting up a custom stage for MR review\n\nThe team added a [new custom stage](https://docs.gitlab.com/ee/user/group/value_stream_analytics/#value-stream-stage-events) in VSA called **Review Time to Merge** to specifically track the time from when a reviewer was first assigned to when the MR was merged.\n\n* Start event: MR first reviewer assigned  \n* End event: MR merged\n\nBy defining this stage, VSA began measuring the duration of the MR review process, giving us precise data on where time was being spent.\n\n![Defining stage of VSA](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097884/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097883929.png)\n\n### 2. Using the Total Time Chart for clarity\n\nWith the custom stage in place, the team used the [**Total Time Chart** on the VSA Overview page](https://about.gitlab.com/blog/value-stream-total-time-chart/) (**Analyze > Value Stream**) to visualize how much time was spent during the new MR Review stage. 
By comparing the values represented by each area on the chart, the team could quickly identify how this stage contributed to the total software delivery lifecycle (SDLC) time.\n\n![total time chart for VSA](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097884/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097883930.png)\n\n### 3. Drilling down for deeper insights\n\nTo investigate specific delays, the team used the **Stage Navigation Bar** to dive deeper into the MR Review stage. This view allowed them to:\n\n- Sort MRs by review time: The stage table showed all related MRs, sorted by review duration, making it easy to detect slow MRs.  \n- Analyze individual MRs: For each MR, that team could examine factors such as reviewer assignment delays, multiple rounds of feedback, idle time after approval, and MR size/complexity.\n\n## The outcome: Actionable insights and improvements\n\nBy customizing VSA to track [MR review time](https://docs.gitlab.com/user/project/merge_requests/reviews/), the team uncovered several key insights:\n\n- **Delays in reviewer assignment:** Some MRs experienced delays because reviewers were assigned late, or reviewers had too many MRs in their queue.  \n- **Slow review start times:** Even after assignment, certain MRs sat idle before reviews began, often due to context switching or competing priorities.  \n- **Multiple feedback loops:** Larger MRs often required multiple rounds of feedback, which extended review time significantly.  \n- **Idle time post-approval:** Some MRs were approved but not merged promptly, often due to deployment coordination issues.\n\nFor the engineering manager on the team, VSA proved to be transformational/valuable in managing their team's workflow: *\"I've used the VSA to justify where we were spending time in MR completion. 
We have VSA customized to our needs, and it's been very beneficial to our investigations for opportunities for improvements.”* \n\nAlso, from this dogfooding experience, we’re now developing a key enhancement to improve visibility into the review process. We're adding a new event to VSA — [Merge request last approved at](https://gitlab.com/gitlab-org/gitlab/-/issues/503754) — which creates a stage that breaks down MR review steps even further for granular visibility.\n\n## The power of data-driven decisions\n\nBy leveraging GitLab’s VSA, we didn’t just identify bottlenecks – we gained actionable insights that led to measurable improvements in MR review time and overall developer productivity. We optimized merge request review cycles and increased developer throughput, validating our commitment to continuous improvement through measurement.\n\n> Want to learn more about how VSA can help your team? [Start a free  trial of GitLab Ultimate](https://about.gitlab.com/free-trial/), customize your value streams, and see how you can make improvements throughout the SDLC for your teams. 
Then, make sure to [share your feedback and experiences in this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/520962).\n\n## Read more\n\n- [Optimize value stream efficiency to do more with less, faster](https://about.gitlab.com/the-source/platform/optimize-value-stream-efficiency-to-do-more-with-less-faster/)\n- [New Scheduled Reports Generation tool simplifies value stream management](https://about.gitlab.com/blog/new-scheduled-reports-generation-tool-simplifies-value-stream-management/)\n- [Value stream analytics documentation](https://docs.gitlab.com/user/group/value_stream_analytics/)\n- [Value stream management: Total Time Chart simplifies top-down optimization flow](https://about.gitlab.com/blog/value-stream-total-time-chart/)\n",[717,695,9,888,930],{"slug":2751,"featured":6,"template":698},"how-we-reduced-mr-review-time-with-value-stream-management","content:en-us:blog:how-we-reduced-mr-review-time-with-value-stream-management.yml","How We Reduced Mr Review Time With Value Stream Management","en-us/blog/how-we-reduced-mr-review-time-with-value-stream-management.yml","en-us/blog/how-we-reduced-mr-review-time-with-value-stream-management",{"_path":2757,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2758,"content":2763,"config":2768,"_id":2770,"_type":14,"title":2771,"_source":16,"_file":2772,"_stem":2773,"_extension":19},"/en-us/blog/improving-oauth-ropc-security-on-gitlab-com",{"title":2759,"description":2760,"ogTitle":2759,"ogDescription":2760,"noIndex":6,"ogImage":942,"ogUrl":2761,"ogSiteName":685,"ogType":686,"canonicalUrls":2761,"schema":2762},"Improving OAuth ROPC security on GitLab.com","GitLab.com is improving the security of OAuth Resource Owner Password Credentials (ROPC) by requiring client authentication, effective April 8, 2025.","https://about.gitlab.com/blog/improving-oauth-ropc-security-on-gitlab-com","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": 
\"Improving OAuth ROPC security on GitLab.com\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"GitLab Security Team\"}],\n        \"datePublished\": \"2025-04-01\",\n      }",{"title":2759,"description":2760,"authors":2764,"heroImage":942,"date":1620,"body":2766,"category":717,"tags":2767},[2765],"GitLab Security Team","GitLab.com will require client authentication for OAuth Resource Owner Password Credentials (ROPC) beginning on **April 8, 2025**. ROPC was omitted by the OAuth working group in RFC Version 2.1. Existing ROPC integrations without client credentials will experience service disruption after this date. Please update your integrations to include client credentials before the deadline. \n\n## What is changing\n\nGitLab.com is improving the security of OAuth ROPC by requiring client authentication for all requests, effective **April 8, 2025**. For more details about ROPC and authentication mechanisms, read more in the [“Example ROPC Request Types” section of this notice](#example-ropc-request-types) or read about [ROPC in the OAuth API GitLab page](https://docs.gitlab.com/api/oauth2/#resource-owner-password-credentials-flow). \n\n## Why this change matters\n\n* **Enhanced security:** Client authentication provides an additional layer of security by ensuring that only authorized applications can request access tokens.  \n* **Standards compliance:** This change brings GitLab's OAuth implementation into alignment with industry best practices and OAuth 2.0 specifications.  \n* **Improved auditing:** Client authentication improves application request traceability and monitoring.\n\n## Required action\n\nWe strongly recommend updating your implementation before **April 8, 2025,** by following these steps:\n\n1. **Register your application** in GitLab to obtain client credentials:  \n   * Navigate to **User Settings > Applications** (or register a group or instance OAuth application as desired).  \n   * Create a new application or use an existing one.  
\n   * Note the provided `Application ID` (client_id) and `Secret` (client_secret).  \n2. **Update your authentication requests** to include the client credentials:  \n   * Add the `client_id` and `client_secret` parameters to your token requests.  \n   * Test your implementation in our staging environment.  \n3. **Review our implementation documentation** for detailed guidance:  \n   * [GitLab OAuth Authentication Guide](https://docs.gitlab.com/ee/api/oauth2.html)\n\n## Example ROPC request types\n\nDetailed examples of authorization requests as documented in the [OAuth API GitLab page](https://docs.gitlab.com/api/oauth2/#resource-owner-password-credentials-flow) are listed below. \n\n**Insecure ROPC method example:**\n\n*This insecure ROPC method does not use client authentication, and will not work on GitLab.com after April 8, 2025.* \n\n```\nPOST /oauth/token\nContent-Type: application/x-www-form-urlencoded\n\ngrant_type=password&username=user@example.com&password=secret\t\n```\n\n**Insecure ROPC JSON method example:**\n\n*This insecure ROPC method does not use client authentication, and will not work on GitLab.com after April 8, 2025.* \n\n```\nPOST /oauth/token\nContent-Type: application/json\n{\n  \"grant_type\": \"password\",\n  \"username\": \"user@example.com\",\n  \"password\": \"secret\"\n}\n```\n\n**Required method going forward:**\n\n```\nPOST /oauth/token\nContent-Type: application/x-www-form-urlencoded\n\ngrant_type=password&username=user@example.com&password=secret&client_id=APP_ID&client_secret=APP_SECRET\n```\n\n**Required method - JSON example:** \n\n```\nPOST /oauth/token\nContent-Type: application/json\n\n{\n  \"grant_type\": \"password\",\n  \"username\": \"user@example.com\",\n  \"password\": \"secret\",\n  \"client_id\": \"APP_ID\",\n  \"client_secret\": \"APP_SECRET\"\n}\n```\n\n## Need further guidance?\n\n* **Documentation:** [GitLab OAuth 2.0 Guide](https://docs.gitlab.com/ee/api/oauth2.html)  \n* **Support:** Contact [GitLab 
Support](https://about.gitlab.com/support/)  \n* **Community Forum:** Discuss this change in the [GitLab Forum](https://forum.gitlab.com/)\n",[717,693,9],{"slug":2769,"featured":6,"template":698},"improving-oauth-ropc-security-on-gitlab-com","content:en-us:blog:improving-oauth-ropc-security-on-gitlab-com.yml","Improving Oauth Ropc Security On Gitlab Com","en-us/blog/improving-oauth-ropc-security-on-gitlab-com.yml","en-us/blog/improving-oauth-ropc-security-on-gitlab-com",{"_path":2775,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2776,"content":2782,"config":2787,"_id":2789,"_type":14,"title":2790,"_source":16,"_file":2791,"_stem":2792,"_extension":19},"/en-us/blog/inside-dora-performers-score-in-gitlab-value-streams-dashboard",{"title":2777,"description":2778,"ogTitle":2777,"ogDescription":2778,"noIndex":6,"ogImage":2779,"ogUrl":2780,"ogSiteName":685,"ogType":686,"canonicalUrls":2780,"schema":2781},"Inside DORA Performers score in GitLab Value Streams Dashboard ","Learn how four key metrics drive DevOps maturity, helping teams optimize workflows and achieve DevOps excellence.\n","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098908/Blog/Hero%20Images/Blog/Hero%20Images/AdobeStock_644947854_248JIrEOCaGJdfJdiSjYde_1750098907747.jpg","https://about.gitlab.com/blog/inside-dora-performers-score-in-gitlab-value-streams-dashboard","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Inside DORA Performers score in GitLab Value Streams Dashboard \",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Haim Snir\"}],\n        \"datePublished\": \"2024-01-18\",\n      }",{"title":2777,"description":2778,"authors":2783,"heroImage":2779,"date":2784,"body":2785,"category":1277,"tags":2786},[2746],"2024-01-18","The DevOps Research and Assessment ([DORA](https://docs.gitlab.com/ee/user/analytics/dora_metrics.html)) metrics are industry-standard measurements to help better 
understand the capabilities that drive software delivery and operations performance. GitLab recently added a DORA Performers score panel to the Value Streams Dashboard in the GitLab DevSecOps Platform to visualize the status of the organization's DevOps performance across different projects.\n\nThis new visualization displays a breakdown of the DORA performance levels, designating a score level for each project under a group. Executives can use this visualization to easily identify the highs and lows in DORA scores and understand their organization's DevOps health top to bottom.\n\n> [Try the Value Streams Dashboard today.](https://about.gitlab.com/blog/getting-started-with-value-streams-dashboard/)\n\n## What are DORA metrics?\n\nDuring the past nine years, the DORA team gathered insights from over 36,000 professionals around the globe on how to measure the performance of a software development team. They identified four metrics as key indicators to measure software teams' development effectiveness and efficiency:\n\n- [Deployment frequency](https://docs.gitlab.com/ee/user/analytics/dora_metrics.html#deployment-frequency) and [Lead time for changes](https://docs.gitlab.com/ee/user/analytics/dora_metrics.html#lead-time-for-changes) measure team velocity.\n- [Change failure rate](https://docs.gitlab.com/ee/user/analytics/dora_metrics.html#change-failure-rate) and [Time to restore service](https://docs.gitlab.com/ee/user/analytics/dora_metrics.html#time-to-restore-service) measure stability.\n\nBy analyzing these metrics, teams are able to find areas for improvement, optimize their workflows, and ultimately drive positive business results.\n\nDORA uses these metrics to identify high-performing, medium-performing, and low-performing teams.  
These performance levels provide a framework for organizations to assess their DevOps maturity and effectiveness.\n\n![DORA performers](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098929/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098929143.png)\n\nHigh performance indicates that the team is operating at excellent speed and stability in their software delivery, reaching the peak of DevOps maturity.\n\nMedium and low performance levels suggest opportunities for improvement in different aspects of the software development and delivery process.\n\nLet's take a closer look at the DORA definition for each performance level.\n\n![Chart of performance metrics](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098929/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750098929144.png)\n\u003Csup>\u003Csub>_Source: [DORA Accelerate State of DevOps report](https://cloud.google.com/blog/products/devops-sre/dora-2022-accelerate-state-of-devops-report-now-out)_\u003C/sub>\u003C/sup>\u003Cp>\u003C/p>\n\n## GitLab definitions for the DORA score performance levels\n\nDORA metrics are available out of the box in the GitLab DevSecOps platform. To enable the score calculation to operate \"out of the box\" with GitLab, we adjust the scoring rules so they work with the platform's unified data model. 
Read more in the [score definition documentation](https://docs.gitlab.com/ee/user/analytics/value_streams_dashboard.html#dora-performers-score-panel).\n\nThe goal is for organizations to strive for high performance in these metrics, as a high score often correlates with better business outcomes, such as increased efficiency, faster time-to-market, and higher software quality.\n\n## DORA metrics in GitLab\n\nIn addition to the Value Streams dashboard, the DORA metrics are available also on the [CI/CD analytics charts](https://docs.gitlab.com/ee/user/analytics/ci_cd_analytics.html), which show the history of DORA metrics over time, and on [Insights reports](https://docs.gitlab.com/ee/user/project/insights/index.html#dora-query-parameters) where you can create custom charts.\n\nWatch our DORA overview video:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n \u003Ciframe src=\"https://www.youtube.com/embed/jYQSH4EY6_U?si=sE9rf_X58BGD2uK9\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## Get started today\nYou can get started with the Value Streams Dashboard by [following the instructions](https://about.gitlab.com/blog/getting-started-with-value-streams-dashboard/) in this documentation. 
Then, to help us improve the value of the Value Streams Dashboard, please share feedback about your experience in this [brief survey](https://gitlab.fra1.qualtrics.com/jfe/form/SV_50guMGNU2HhLeT4).\n",[2281,951,9,717,2729],{"slug":2788,"featured":6,"template":698},"inside-dora-performers-score-in-gitlab-value-streams-dashboard","content:en-us:blog:inside-dora-performers-score-in-gitlab-value-streams-dashboard.yml","Inside Dora Performers Score In Gitlab Value Streams Dashboard","en-us/blog/inside-dora-performers-score-in-gitlab-value-streams-dashboard.yml","en-us/blog/inside-dora-performers-score-in-gitlab-value-streams-dashboard",{"_path":2794,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2795,"content":2801,"config":2807,"_id":2809,"_type":14,"title":2810,"_source":16,"_file":2811,"_stem":2812,"_extension":19},"/en-us/blog/inside-look-how-gitlabs-test-platform-team-validates-ai-features",{"title":2796,"description":2797,"ogTitle":2796,"ogDescription":2797,"noIndex":6,"ogImage":2798,"ogUrl":2799,"ogSiteName":685,"ogType":686,"canonicalUrls":2799,"schema":2800},"Inside look: How GitLab's Test Platform team validates AI features","Learn how we continuously analyze AI feature performance, including testing latency worldwide, and get to know our new AI continuous analysis tool.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099033/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%2811%29_78Dav6FR9EGjhebHWuBVan_1750099033422.png","https://about.gitlab.com/blog/inside-look-how-gitlabs-test-platform-team-validates-ai-features","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Inside look: How GitLab's Test Platform team validates AI features\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Mark Lapierre\"},{\"@type\":\"Person\",\"name\":\"Vincy Wilson\"}],\n        \"datePublished\": \"2024-06-03\",\n      
}",{"title":2796,"description":2797,"authors":2802,"heroImage":2798,"date":1440,"body":2805,"category":801,"tags":2806},[2803,2804],"Mark Lapierre","Vincy Wilson","AI is increasingly becoming a centerpiece of software development - many companies are integrating it throughout their DevSecOps workflows to improve productivity and increase efficiency. Because of this now-critical role, AI features should be tested and analyzed on an ongoing basis. In this article, we take you behind the scenes to learn how [GitLab's Test Platform team](https://handbook.gitlab.com/handbook/engineering/infrastructure/test-platform/) does this for [GitLab Duo](https://about.gitlab.com/gitlab-duo/) features by conducting performance validation, functional readiness, and continuous analysis across GitLab versions. With this three-pronged approach, GitLab aims to ensure that GitLab Duo features are performing optimally for our customers.\n\n> Discover the future of AI-driven software development with our GitLab 17 virtual launch event. [Watch today!](https://about.gitlab.com/seventeen/)\n\n## AI and testing\n\nAI's non-deterministic nature, where the same input can produce different outputs, makes ensuring a great user experience a challenge. So, when we integrated AI deep into the GitLab DevSecOps Platform, we had to adapt to our best practices to address this challenge. \n\nThe [Test Platform team's mission ](https://handbook.gitlab.com/handbook/engineering/infrastructure/test-platform/) is to help enable the successful development and deployment of high-quality software applications with continuous analysis and efficiency to help ensure customer satisfaction. The key to achieving this is by delivering tools that help increase standardization, repeatability, and test consistency. \n\nApplying this to GitLab Duo, our AI suite of tools to power DevSecOps workflows, means being able to continuously analyze its performance and identify opportunities for improvement. 
Our goal is to gain clear, actionable insights that will help us to enhance GitLab Duo's capabilities and, as a result, better meet our customers' needs. \n\n## The need for continuous analysis of AI\n\nTo continuously assess GitLab Duo, we needed a mechanism for analyzing feature performance across releases. Therefore, we created an AI continuous analysis tool to automate the collection and analysis of data to achieve this. \n\n![diagram of how the AI continuous analysis tool works](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099041/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750099041503.png)\n\n\u003Ccenter>\u003Ci>How the AI continuous analysis tool works\u003C/i>\u003C/center>\n\n### Building the AI continuous analysis tool\n\nTo gain detailed, user-centric insights, we needed to gather data in the appropriate context – in this case, the integrated development environment (IDE), as it is where most of our users access GitLab Duo. We narrowed this down further by opting for the Visual Studio Code IDE, a popular choice within our community. Once the environment was chosen, we automated entering code prompts and recording the provided suggestions. The interactions with the IDE are handled by the [WebdriverIO VSCode service](https://github.com/webdriverio-community/wdio-vscode-service), and CI operations are handled through [GitLab CI/CD](https://docs.gitlab.com/ee/ci/). This automation significantly scaled up data collection and eliminated repetitive tasks for GitLab team members. To start, we have focused on measuring the performance of GitLab Duo Code Suggestions, but plan to expand to other GitLab AI features in the future.\n\n### Analyzing the data\n\nAt the core of our AI continuous analysis tool is a mechanism for collecting and analyzing code suggestions. This involves automatically entering code prompts, recording the suggestions provided, and logging timestamps of relevant events. 
We measure the time from when the tool provides an input until a suggestion is displayed in the UI. In addition, we record the logs created by the IDE, which report the time it took for each suggestion response to be received. With this data, we can compare the latency of suggestions in terms of how long it takes the backend AI service to send a response to the IDE, and how long it takes for the IDE to display the suggestion for the user. We then can compare latency and other metrics of GitLab Duo features across multiple releases. The GitLab platform has the ability to analyze [code quality](https://docs.gitlab.com/ee/ci/testing/code_quality.html) and [application security](https://docs.gitlab.com/ee/user/application_security/), so we leverage these capabilities to enable the AI continuous analysis tool to analyze the quality and security of the suggestions provided by GitLab Duo.\n\n### Improving AI-driven suggestions\n\nOnce the collected data is analyzed, the tool automatically generates a single report summarizing the results. The report includes key statistics (e.g., mean latency and/or latency at various percentiles), descriptions of notable differences or patterns, links to raw data, and CI/CD pipeline logs and artifacts. The tool also records a video of each prompt and suggestion, which allows us to review specific cases where differences are highlighted. This creates an opportunity for the UX researchers and development teams to take action on the insights gained, helping to improve the overall user experience and system performance.\n\nThe tool is at an early stage of development, but it's already helped us to improve the experience for GitLab Duo Code Suggestions users. 
Moving forward, we plan to expand our tool’s capabilities, incorporate more metrics and consume and provide input to our [Centralized Evaluation Framework](https://about.gitlab.com/direction/ai-powered/ai_framework/ai_evaluation/), which validates AI models, to enhance our continuous analysis further.\n\n## Performance validation\n\nAs AI has become integral to GitLab's offerings, optimizing the performance of AI-driven features is essential. Our performance tests aim to evaluate and monitor the performance of our GitLab components, which interact with AI service backends. While we can monitor the performance of these external services as part of our production environment's observability, we cannot control them. Thus, including third-party services in our performance testing would be expensive and yield limited benefits. Although third-party AI providers contribute to overall latency, the latency attributable to GitLab components is still important to check. We aim to detect changes that might lead to performance degradation by monitoring GitLab components. \n\n### Building AI performance validation test environment\n\nIn our AI test environments, the [AI Gateway](https://docs.gitlab.com/ee/architecture/blueprints/ai_gateway/#summary), which is a stand-alone service to give access to AI features to GitLab users, has been configured to return mocked responses, enabling us to test the performance of AI-powered features without interacting with third-party AI service providers. We conduct AI performance tests on [reference architecture environments of various sizes](https://docs.gitlab.com/ee/administration/reference_architectures/). Additionally, we evaluate new tests in their own isolated environment before they're added to the larger environments.\n\n### Testing multi-regional latency\n\nMulti-regional latency tests need to be run from various geolocations to validate that requests are being served from a suitable location close to the source of the request. 
We do this today with the use of the [GitLab Environment Toolkit](https://gitlab.com/gitlab-org/gitlab-environment-toolkit). The toolkit provisions an environment in the identified region to test (note: both the AI Gateway and the provisioned environment are in the same region), then uses the [GitLab Performance Tool](https://gitlab.com/gitlab-org/quality/performance) to run tests to measure time to first byte (TTFB). TTFB is our way of measuring time to the first part of the response being rendered, which contributes to the perceived latency that a customer experiences. To account for this measurement, our tests have a check to help ensure that the [response itself isn't empty](https://gitlab.com/gitlab-org/quality/performance/-/blob/cee8bef023e590e6ca75828e49f5c7c596581e06/k6/tests/experimental/api_v4_code_suggestions_generation_streaming.js#L70). \n\nOur tests are expanding further to continue to measure perceived latency from a customer’s perspective. We have captured a set of baseline response times that indicate how a specific set of regions performed when the test environment was in a known good state. These baselines allow us to compare subsequent environment updates and other regions to this known state to evaluate the impact of changes. These baseline measurements can be updated after major updates to ensure they stay relevant in the future. \n\nNote: As of this article's publication date, we have AI Gateway deployments across the U.S., Europe, and Asia. 
To learn more, visit our [handbook page](https://handbook.gitlab.com/handbook/engineering/development/data-science/ai-powered/ai-framework/#-aigw-region-deployments).\n\n## Functionality\n\nTo help continuously enable customers to confidently leverage AI reliably, we must continuously work to ensure our AI features function as expected.\n\n### Unit and integration tests\n\nFeatures that leverage AI models still require rigorous automated tests, which help engineers develop new features and changes confidently. However, since AI features can involve integrating with third-party AI providers, we must be careful to stub any external API calls to help ensure our tests are fast and reliable.\n\nFor a comprehensive look at testing at GitLab, look at our [testing standards and style guidelines](https://docs.gitlab.com/ee/development/testing_guide/). \n\n### End-to-end tests \n\nEnd-to-end testing is a strategy for checking whether the application works as expected across the entire software stack and architecture. We've implemented it in two ways for GitLab Duo testing: using real AI-generated responses and mock-generated AI responses.\n\n![validating features - image 2](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099041/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750099041504.png)\n\n\u003Ccenter>\u003Ci>End-to-end test workflow\u003C/i>\u003C/center>\n\n#### Using real AI-generated responses\n\nAlthough costly, end-to-end tests are important to help ensure the entire user experience functions as expected. Since AI models are non-deterministic, end-to-end test assertions for validating real AI-generated responses should be loose enough to help ensure the feature functions without relying on a response that may change. 
This might mean an assertion that checks for some response with no errors or for a response we are certain to receive.\n\nAI-driven functionality is not accessible only from within the GitLab application, so we must also consider user workflows for other applications that leverage these features. For example, to cover the use case of a developer requesting code suggestions in [IntelliJ IDEA](https://www.jetbrains.com/idea/) using the GitLab Duo plugin, we need to drive the IntelliJ application to simulate a user workflow. Similarly, to ensure that the GitLab Duo Chat experience is consistent in VS Code, we must drive the VS Code application and exercise the GitLab Workflow extension. Working to ensure these workflows are covered helps us maintain a consistently great developer experience across all GitLab products. \n\n#### Using mock AI-generated responses\n\nIn addition to end-to-end tests using real AI-generated responses, we run some end-to-end tests against test environments configured to return mock responses. This allows us to verify changes to GitLab code and components that don’t depend on responses generated by an AI model more frequently.\n\n> For a closer look at end-to-end testing, read our [end-to-end testing guide](https://docs.gitlab.com/ee/development/testing_guide/end_to_end/). \n\n### Exploratory testing and dogfooding\n\nAI features are built by humans for humans. At GitLab, exploratory testing and dogfooding greatly benefit us. GitLab team members are passionate about what features get shipped, and insights from internal usage are invaluable in shaping the direction of AI features.\n\n[Exploratory testing](https://about.gitlab.com/topics/devops/devops-test-automation/#test-automation-stages) allows the team to creatively exercise features to help ensure edge case bugs are identified and resolved. Dogfooding encourages team members to use AI features in their daily workflows, which helps us identify realistic issues from realistic users. 
For a comprehensive look at how we dogfood AI features, look at [Developing GitLab Duo: How we are dogfooding our AI features](https://about.gitlab.com/blog/developing-gitlab-duo-how-we-are-dogfooding-our-ai-features/).\n\n## Get started with GitLab Duo\nHopefully this article gives you insight into how we are validating AI features at GitLab. We have integrated our team's process into our overall development as we iterate on GitLab Duo features. We encourage you to try GitLab Duo in your organization and reap the benefits of AI-powered workflows.\n\n> Start a [free trial of GitLab Duo](https://about.gitlab.com/gitlab-duo/#free-trial) today!\n\n_Members of the GitLab Test Platform team contributed to this article._\n",[803,695,9,1423,972,2729],{"slug":2808,"featured":91,"template":698},"inside-look-how-gitlabs-test-platform-team-validates-ai-features","content:en-us:blog:inside-look-how-gitlabs-test-platform-team-validates-ai-features.yml","Inside Look How Gitlabs Test Platform Team Validates Ai Features","en-us/blog/inside-look-how-gitlabs-test-platform-team-validates-ai-features.yml","en-us/blog/inside-look-how-gitlabs-test-platform-team-validates-ai-features",{"_path":2814,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2815,"content":2820,"config":2826,"_id":2828,"_type":14,"title":2829,"_source":16,"_file":2830,"_stem":2831,"_extension":19},"/en-us/blog/introducing-ci-cd-steps-a-programming-language-for-devsecops-automation",{"title":2816,"description":2817,"ogTitle":2816,"ogDescription":2817,"noIndex":6,"ogImage":732,"ogUrl":2818,"ogSiteName":685,"ogType":686,"canonicalUrls":2818,"schema":2819},"Introducing CI/CD Steps, a programming language for DevSecOps automation","Inside GitLab’s vision for CI/CD programmability and a look at how we simplified workflow automation.","https://about.gitlab.com/blog/introducing-ci-cd-steps-a-programming-language-for-devsecops-automation","\n                        {\n        \"@context\": \"https://schema.org\",\n       
 \"@type\": \"Article\",\n        \"headline\": \"Introducing CI/CD Steps, a programming language for DevSecOps automation\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Darren Eastman\"}],\n        \"datePublished\": \"2024-08-06\",\n      }",{"title":2816,"description":2817,"authors":2821,"heroImage":732,"date":2823,"body":2824,"category":717,"tags":2825},[2822],"Darren Eastman","2024-08-06","For years, the DevOps industry has tried to simplify how developers create automation scripts or workflows to automatically test a code change and to perform a task with the resulting artifact or binary. Today, we are introducing [CI/CD Steps](https://docs.gitlab.com/ee/ci/steps/), a programming language for DevSecOps automation in experiment phase, as a solution to this challenge. With CI/CD Steps, software development teams can easily create complex automation workflows within GitLab.\n\n## The path to CI/CD Steps\n\nEarly in the company's history, GitLab founders and engineers decided that there must be a tight integration between source code management, the place you store your code, and continuous integration, the automation workflows that test your code changes. And we've continued to evolve that integration, focusing on workflow automation tasks and differentiating from the approaches of CI engines across the industry, including Jenkins CI's domain-specific language, GitHub Actions, and many more. \n\nAnd, yes, I did mean to use the term workflow automation tasks rather than [CI and continuous deployment (CD)](https://about.gitlab.com/topics/ci-cd/). This is simply a result of the code that I have seen our customers develop. In a lot of cases, the platform engineering teams that support development teams using GitLab are writing complex automation scripts (workflows). So we need to embrace a more expansive construct beyond simply CI and CD. 
In fact, I have seen some developers rave about the flexibility of new CI/CD solutions that allow for modularity and conditionals in writing automation workflows.\n\nAt GitLab, our initial approach for CI authoring was based on YAML. We can endlessly debate the pros and cons of such a choice, but for me, as a [DevOps](https://about.gitlab.com/topics/devops/) practitioner coming from a large Fortune 50 company with a moshpit of Jenkins Groovy code and hundreds of permutations of scripts basically performing the same job, the GitLab CI authoring and execution approach was a breath of fresh air. \n\nThe first time I read a GitLab CI file – this was back in mid-2019 – my first thought was, \"No, it could not be that simple.\" A non-developer can easily grasp the intent of a basic GitLab CI pipeline without prior knowledge of all of the intricacies of the syntax of the execution model. In fact, I had just spent a year working on a team that spent several hours each day helping other development teams debug Jenkins pipelines written in Groovy and trying to figure out how to test, and in some cases build, large Java monoliths; in other cases, tons of microservices.\n\nWhile there are benefits to a GitLab CI YAML-based authoring and a bash script execution type approach, there are also limitations. Limitations that developers or platform engineers bump into as they integrate more complex workflows into their CI pipelines. These issues seem to be amplified at enterprise scale as platform teams are trying to simplify or standardize workflows across multiple development teams. In fact, one of the quotes from a recent customer survey states: “GitLab needs to embrace a post-YAML world for CI.”\n\nSo, over the past two years, our pipeline authoring team, led by Product Manager [Dov Hershkovitch](https://gitlab.com/dhershkovitch), has been working extensively on improving the pipeline authoring experience. 
They've also been improving the management experience of the building blocks for workflow automation – especially at scale. In fact, a part of this work, the [GitLab CI/CD Catalog](https://about.gitlab.com/blog/ci-cd-catalog-goes-ga-no-more-building-pipelines-from-scratch/), recently became generally available.\n\nThe logical next step was to build a new language for workflow automation.\n\n## Understanding CI/CD Steps\n\nGitLab CI/CD Steps is a concept incubated by our top-notch engineers. In [our documentation](https://docs.gitlab.com/ee/ci/steps/), we describe CI/CD Steps as reusable and composable pieces of a CI job that can be referenced in a GitLab CI pipeline configuration. But what does that really mean and what is the long-term value proposition?\n\nAs I was giving this some thought, a comment from one of our customers (paraphrased here) came to mind:\n\n“CI/CD Steps enables you to compose inputs and outputs for a CI/CD job. With CI/CD Steps, developers can define inputs and outputs and, therefore, use CI/CD Steps as a function as we do in any modern programming language. A key differentiator to a normal CI/CD component is that CI/CD Steps allows the use of the outputs of other steps without GitLab having to know certain values before running the pipeline. With CI/CD Steps, you could more easily auto-cancel redundant jobs when all jobs are running as part of the parent pipeline versus having to use child pipelines.”\n\nHaving CI/CD Steps alongside the current GitLab CI/CD execution mechanism and the [CI/CD component catalog](https://docs.gitlab.com/ee/ci/components/index.html) unlocks so many possibilities for creating and maintaining the most complex CI/CD workflows. \n\nA key feature is reusability. Now, I am not suggesting that once we release CI/CD Steps as generally available, you would immediately start refactoring your currently working CI/CD jobs to CI/CD Steps. 
Instead, you likely will find opportunities to introduce CI/CD Steps to optimize complex pipeline workflows, and, in doing so, you will begin to reuse a CI/CD Step that you author in multiple pipelines.\n\nCI/CD Steps is a marathon, not a sprint. When we release this in beta (currently targeted for late 2024) and start getting feedback from you, we will learn new information that will guide the evolution of this new CI programming language as well as the new Step Runner, which is designed specifically to run CI/CD Steps alongside the current CI/CD jobs.\n\nI'm sure there will be questions about our strategy: Why did we make certain syntax choices? Why didn't we use Starlark as the basis for this new approach? Why did we create something new that we all have to learn? My boilerplate response is: At GitLab we develop our software in the open. More importantly, as a customer, user, and community member, if you have an idea of how to make it better, we invite you to create a merge request so we can improve this feature together.\n\nWe are the only enterprise software platform where, as users and customers, **you** have a direct say in how the platform evolves and **you** can see the changes happening transparently and in real time. That’s the power of GitLab – we iterate and we collaborate. You have invested in a platform and community that is able to evolve with the ever-changing software industry.\n\n## Create your own CI/CD step\n\nTo get a deeper understanding of CI Steps and our direction, take a look at the detailed refactoring proof-of-concept writeup in [this issue](https://gitlab.com/gitlab-org/step-runner/-/issues/85). [Principal engineer Joe Burnett](https://gitlab.com/josephburnett) walks through in great detail the thought process for refactoring a CI/CD job used as part of our GitLab Runner automated test framework. 
There are also recommendations noted at the end that will inform the evolution of the CI Steps syntax.\n\nThen check out the [CI/CD Steps tutorial](https://docs.gitlab.com/ee/tutorials/setup_steps/) and try creating your own CI/CD step. We recently released the `run` keyword, so testing out a CI/CD step will be simpler than previous examples that required using environment variables. This feature set is experimental so please share your experiences on the [feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/460057). There also is a separate feedback issue if you are testing the [Run GitHub Actions with CI/CD Steps experimental feature](https://docs.gitlab.com/ee/ci/steps/#actions).\n\nWe look forward to working with you on this journey to continuously improve the GitLab CI/CD authoring experience.\n\n## Read more\n- [CI/CD Catalog goes GA](https://about.gitlab.com/blog/ci-cd-catalog-goes-ga-no-more-building-pipelines-from-scratch/)\n- [FAQ: GitLab CI/CD Catalog](https://about.gitlab.com/blog/faq-gitlab-ci-cd-catalog/)\n- [What is CI/CD?](https://about.gitlab.com/topics/ci-cd/)\n- [The basics of CI](https://about.gitlab.com/blog/basics-of-gitlab-ci-updated/)\n",[9,109,1543,1542,695],{"slug":2827,"featured":91,"template":698},"introducing-ci-cd-steps-a-programming-language-for-devsecops-automation","content:en-us:blog:introducing-ci-cd-steps-a-programming-language-for-devsecops-automation.yml","Introducing Ci Cd Steps A Programming Language For Devsecops 
Automation","en-us/blog/introducing-ci-cd-steps-a-programming-language-for-devsecops-automation.yml","en-us/blog/introducing-ci-cd-steps-a-programming-language-for-devsecops-automation",{"_path":2833,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2834,"content":2840,"config":2847,"_id":2849,"_type":14,"title":2850,"_source":16,"_file":2851,"_stem":2852,"_extension":19},"/en-us/blog/introducing-compromised-password-detection-for-gitlab-com",{"title":2835,"description":2836,"ogTitle":2835,"ogDescription":2836,"noIndex":6,"ogImage":2837,"ogUrl":2838,"ogSiteName":685,"ogType":686,"canonicalUrls":2838,"schema":2839},"Introducing compromised password detection for GitLab.com","GitLab is adding compromised password detection on June 19, 2025. After that date, users logging in with known compromised passwords will be warned.  Here is what you need to know.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097341/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%287%29_6QBUJnfaq500YYVKVDlxK7_1750097340425.png","https://about.gitlab.com/blog/introducing-compromised-password-detection-for-gitlab-com","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Introducing compromised password detection for GitLab.com\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Ruby Nealon\"},{\"@type\":\"Person\",\"name\":\"Matt Coons\"}],\n        \"datePublished\": \"2025-05-22\"\n      }",{"title":2835,"description":2836,"authors":2841,"heroImage":2837,"date":2844,"body":2845,"category":693,"tags":2846},[2842,2843],"Ruby Nealon","Matt Coons","2025-05-22","Data breaches have become more common than ever. [According to a recent report by the Identity Theft Resource Center](https://www.idtheftcenter.org/publication/2024-data-breach-report/), over 2,800 data breaches occurred in 2024 alone, with over 1 billion victim notices sent by compromised organizations. 
Often, these breaches result in the exposure of credentials – usernames, emails, and passwords – in plain text, either directly or with insufficient protection against conversion to plain text. These compromised or stolen credentials are actively and widely leveraged by attackers, too. [Verizon’s 2024 Data Breach Investigations Report](https://www.verizon.com/business/resources/reports/2024-dbir-data-breach-investigations-report.pdf) identified use of stolen credentials as the initial action in 24% of breaches, ranking it as their top initial action. \n\nGitLab.com stores your password securely, salted and hashed with bcrypt. Your password goes through a one-way hashing transformation before storage, securing your password and ensuring it is not possible to extract the original password from storage. The representation is also unique: Even if two users shared the same password, the results of the one-way transformations would be completely different. However, these safeguards intentionally make it impractical to identify all users with a compromised or otherwise weak password.\n\n__Starting on June 19, 2025, GitLab will introduce compromised password detection during sign-in for all GitLab.com users.__ This works by securely comparing the password you log in with against a database of known compromised credentials during authentication. If the password is correct but matches known compromised credentials, you will be alerted with a banner on GitLab.com and you will be sent an email notification until you change your password. 
\n\n***Note:** Compromised password detection is only for logins using GitLab’s native username and password and does not apply to credentials used through SSO.*\n\nExample compromised password warning banner: \n\n![Example Compromised Password Warning Banner](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097349/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097348674.png)\n\nWe’re excited to introduce this additional countermeasure to help ensure your account is secure. We also encourage users to take these additional preventive actions to maintain the security of your account(s): \n\n1. [**Use a strong password unique to your GitLab.com account.**](https://docs.gitlab.com/user/profile/user_passwords/#change-a-known-password)\nGitLab [disallows weak passwords](https://docs.gitlab.com/user/profile/user_passwords/#block-weak-passwords) that are considered compromised or that contain part of your name, email address, or predictable words. We strongly recommend using a password manager like [1Password](https://1password.com/), [Google Password Manager](https://passwords.google.com/), or [Apple Passwords](https://support.apple.com/en-us/120758), as well.  \n3. [**Set up two-factor authentication for your GitLab.com account.**](https://docs.gitlab.com/user/profile/account/two_factor_authentication/#enable-two-factor-authentication)\nGitLab supports time-based, one-time password applications, like [Google Authenticator](https://support.google.com/accounts/answer/1066447?hl=en&co=GENIE.Platform%3DAndroid) and WebAuthn, with a [PIN/fingerprint](https://support.google.com/chromebook/answer/10364515?hl=en) or a [hardware security key](https://www.yubico.com/jp/product/security-key-series/security-key-nfc-by-yubico-black/).   \n5. 
**Prevent yourself from getting locked out of your account.**\n[Change your primary email address](https://docs.gitlab.com/user/profile/#change-your-primary-email) if you no longer have access, and [ensure you have recovery codes](https://docs.gitlab.com/user/profile/account/two_factor_authentication/#recovery-codes) in case your two-factor authentication device is lost or stolen. Also, consider [setting up an alternative method for two-factor authentication](https://docs.gitlab.com/user/profile/account/two_factor_authentication/#set-up-a-webauthn-device).  \n8. **Stay aware of new risks.**\nRegister with a service like [haveibeenpwned.com](http://haveibeenpwned.com) to receive an email notification if your email address appears in a newly disclosed breach. This service is free to use and requires only your email address at registration.\n\n> To learn more about trust and security measures on GitLab.com, visit the [GitLab security page](https://about.gitlab.com/security/), highlighting the GitLab Trust Center, compliance certifications, and security measures that keep users and customers safe on our platform.\n",[693,717,695,1133,9],{"slug":2848,"featured":91,"template":698},"introducing-compromised-password-detection-for-gitlab-com","content:en-us:blog:introducing-compromised-password-detection-for-gitlab-com.yml","Introducing Compromised Password Detection For Gitlab Com","en-us/blog/introducing-compromised-password-detection-for-gitlab-com.yml","en-us/blog/introducing-compromised-password-detection-for-gitlab-com",{"_path":2854,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2855,"content":2861,"config":2867,"_id":2869,"_type":14,"title":2870,"_source":16,"_file":2871,"_stem":2872,"_extension":19},"/en-us/blog/introducing-custom-compliance-frameworks-in-gitlab",{"title":2856,"description":2857,"ogTitle":2856,"ogDescription":2857,"noIndex":6,"ogImage":2858,"ogUrl":2859,"ogSiteName":685,"ogType":686,"canonicalUrls":2859,"schema":2860},"Introducing Custom 
Compliance Frameworks in GitLab","Reduce manual tracking, accelerate audit readiness, and enforce controls faster natively within GitLab DevSecOps workflows.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099268/Blog/Hero%20Images/Blog/Hero%20Images/GitLab_Blog_Header_v4_YBzFAgt2EAkqQfqxNFEgj_1750099267940.svg","https://about.gitlab.com/blog/introducing-custom-compliance-frameworks-in-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Introducing Custom Compliance Frameworks in GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Ian Khor\"},{\"@type\":\"Person\",\"name\":\"Salman Ladha\"}],\n        \"datePublished\": \"2025-04-17\",\n      }",{"title":2856,"description":2857,"authors":2862,"heroImage":2858,"date":2160,"body":2865,"category":693,"tags":2866},[2863,2864],"Ian Khor","Salman Ladha","Maintaining multiple compliance frameworks in fast-moving DevSecOps pipelines is more difficult than ever. As standards evolve independently and become more complex, organizations are buried in overlapping requirements and manual processes – draining developer time and slowing audits. \n\nTo solve this, GitLab is introducing Custom Compliance Frameworks and 50 out-of-the-box (OOTB) controls for a wide variety of compliance standards, including ISO 27001, the [CIS Benchmark](https://about.gitlab.com/blog/new-cis-gitlab-benchmark-scanner-boosts-security-and-compliance/), and SOC 2.\n\nCustom Compliance Frameworks enable organizations to map multiple, overlapping controls from different standards and regulations into a single, unified framework. This flexibility brings much-needed efficiency, allowing businesses to tailor compliance programs in a way that makes sense for them. 
As these policies are embedded directly into GitLab’s CI/CD pipelines, compliance is enforced automatically – without disrupting development.\n\nAdditionally, with the OOTB controls, teams can accelerate compliance adoption, eliminating the need for external tools or complex custom configurations. By embedding compliance directly into the software development lifecycle, GitLab provides real-time visibility, automated enforcement, and simplified audit readiness so teams can ship secure, *compliant* software, faster. \n\nCustom Compliance Frameworks and OOTB controls are available now in GitLab Ultimate.\n\n## Mounting compliance pressure\n\nOrganizations must navigate various compliance frameworks to ensure adherence to numerous regulations and provide assurance to their customers. While these frameworks often share common controls, they rarely align. The result is a reality compliance teams know all too well: manual tracking through spreadsheets that breeds chaos, particularly during audit reviews. \n\nDevelopers are pulled into the compliance fray because modern software development is central to satisfying many of these controls. Instead of building and shipping secure software, they find themselves supporting evidence collection and compliance reviews. A Forrester Total Economic Impact™ Study of GitLab Ultimate found that prior to GitLab developers spent up to [80 hours annually on audit and compliance tasks](https://tei.forrester.com/go/GitLab/GitLabUltimate/?lang=en-us#Appendixes); time diverted from writing code and delivering business value.\n\nThis fragmented approach isn’t just inefficient, it’s costly. Compliance-related costs have [surged by 60% over the past five years](https://www.cato.org/sites/cato.org/files/2024-01/research-brief367.pdf), according to the CATO Institute. 
Without a system that connects compliance enforcement to where software is built, compliance will remain a burdensome afterthought that drives a wedge between developers and security teams. \n\n## Why should you care about Custom Compliance Frameworks\n\nOur customers have asked for greater flexibility when it comes to the tracking and enforcement of compliance within DevSecOps workflows. With this release, we’re happy to empower customers in the following ways: \n\n**Compliance that fits the business, not the other way around**\n\nRegulatory requirements overlap across multiple frameworks causing complexity in tracking and enforcement. Custom Compliance Frameworks allow organizations to create a unified framework that cleanly maps the requirements and controls of multiple standards, reducing manual effort and reliance on costly consultants.\n\n**Faster compliance from setup through to audits**\n\nStart monitoring compliance instantly with OOTB controls aligned with key compliance standards, such as SOC 2, ISO 27001, and CIS Benchmarks. Automated compliance monitoring and evidence collection cuts audit prep from weeks to days, ensuring developers can remain focused on delivering secure software. \n\n**Built-in compliance at the speed of development**\n\nUnlike traditional GRC tools that operate in isolation, GitLab enforces compliance directly in CI/CD pipelines where work happens. 
This deep integration means compliance validation occurs automatically as code moves through the pipeline, eliminating the traditional friction between development speed and security requirements.\n\nHere is an example of how a custom compliance framework can be created in GitLab:\n\n![custom compliance frameworks - edit requirement screen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099291/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750099291312.png)\n\n\u003Cbr>\u003C/br>\n\n![custom compliance frameworks - screen showing requirements](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099291/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750099291312.png)\n\n## What to know about the Custom Compliance Frameworks rollout\n\nThere are two critical aspects of this release: \n\n- As of GitLab 18.0, Custom Compliance Frameworks will be enabled by default. \n- Starting in GitLab 18.0, we’ve enabled Custom Compliance Frameworks by default. We’ve also removed \"Standards\" from the Compliance Center to simplify the experience. Don’t worry — your existing compliance controls still apply. We’ve converted the GitLab Standard and SOC 2 standards into compliance framework labels and transformed their compliance checks into controls (our new term going forward).\n- Only GitLab Ultimate customers can define requirements, map controls, and enforce compliance frameworks. 
Premium users can still use compliance labels, but they won’t have access to the full feature set.\n\nTo learn more about Custom Compliance Frameworks, please watch this introduction video:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/yfJ0oHCIn-8?si=z_Rt_ikry4RhjEAC\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## Shift compliance left with GitLab  \n\nSimilar to security, shifting compliance left means addressing compliance requirements earlier in the software development lifecycle. Since software is central to an organization achieving compliance, embedding controls where software is created is crucial. With GitLab, security and compliance teams can define frameworks, map controls, and automate enforcement directly in CI/CD pipelines. Developers stay focused on shipping features, while compliance teams gain real-time visibility and automated evidence collection to be audit-ready. This unified approach bridges the gap between development and compliance, helping organizations achieve continuous compliance as part of their DevSecOps practice. \n\nAs a result, organizations using GitLab can reduce developer time spent on audit and compliance tasks by 90% and accelerate external audits from several weeks to under one week, according to [Forrester](https://tei.forrester.com/go/GitLab/GitLabUltimate/?lang=en-us#AnalysisOfBenefits). \n\nIf you’re an existing GitLab Ultimate customer and would like to learn more about how Custom Compliance Frameworks can help improve your compliance and security program, [visit our Compliance Center documentation](https://docs.gitlab.com/user/compliance/compliance_center/) where we cover implementation requirements, use cases, and more.\n\n***Note:** ”The Total Economic Impact™ Of GitLab Ultimate” is a commissioned study conducted by Forrester Consulting on behalf of GitLab, October 2024. 
Results are based on a composite organization representative of interviewed customers.*\n\n## Learn more\n\n- [How to ensure separation of duties and enforce compliance with GitLab](https://about.gitlab.com/blog/ensuring-compliance/)\n- [Meet regulatory standards with GitLab security and compliance](https://about.gitlab.com/blog/meet-regulatory-standards-with-gitlab/)\n- [Guide to fulfilling SOC 2 security requirements with GitLab](https://about.gitlab.com/blog/guide-to-fulfilling-soc-2-security-requirements-with-gitlab/)",[693,717,695,9],{"slug":2868,"featured":6,"template":698},"introducing-custom-compliance-frameworks-in-gitlab","content:en-us:blog:introducing-custom-compliance-frameworks-in-gitlab.yml","Introducing Custom Compliance Frameworks In Gitlab","en-us/blog/introducing-custom-compliance-frameworks-in-gitlab.yml","en-us/blog/introducing-custom-compliance-frameworks-in-gitlab",{"_path":2874,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2875,"content":2881,"config":2888,"_id":2890,"_type":14,"title":2891,"_source":16,"_file":2892,"_stem":2893,"_extension":19},"/en-us/blog/introducing-gitlab-advanced-vulnerability-tracking",{"title":2876,"description":2877,"ogTitle":2876,"ogDescription":2877,"noIndex":6,"ogImage":2878,"ogUrl":2879,"ogSiteName":685,"ogType":686,"canonicalUrls":2879,"schema":2880},"Introducing GitLab Advanced Vulnerability Tracking","Learn how this security feature improves the efficiency of vulnerability management by reducing futile auditing time (includes data from a new study).","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749664844/Blog/Hero%20Images/AdobeStock_941867776.jpg","https://about.gitlab.com/blog/introducing-gitlab-advanced-vulnerability-tracking","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Introducing GitLab Advanced Vulnerability Tracking\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Julian Thome\"}],\n 
       \"datePublished\": \"2025-01-21\"\n      }",{"title":2876,"description":2877,"authors":2882,"heroImage":2878,"date":2884,"body":2885,"category":693,"tags":2886},[2883],"Julian Thome","2025-01-21","DevSecOps streamlines software development by allowing teams to ship features quickly and providing short feedback cycles for customers. These short feedback cycles can be used to monitor the impact of a feature from the time it is shipped and to inform developers and product managers about the success or failure of a given deployment.\n\nGitLab, as an agnostic DevSecOps platform, can act as an integration point for different [CI/CD](https://about.gitlab.com/topics/ci-cd/) tools that often contribute to user-facing functionality. For example, the [vulnerability report](https://docs.gitlab.com/ee/user/application_security/vulnerability_report/), which displays all detected vulnerabilities, is visible as a single functionality, but the data in the report may come from a number of different tools in various pipelines.\n\nIn a heterogeneous Static Application Security Testing ([SAST](https://docs.gitlab.com/ee/user/application_security/sast/)) setup we find two potential sources of vulnerability duplication:\n1. Code volatility refers to the reintroduction of vulnerabilities in a constantly changing code base.\n2. Double reporting refers to duplication introduced by multiple tools that are reporting the same vulnerability. 
\n\nGitLab addresses these two sources of duplication by means of the [Advanced Vulnerability Tracking](https://docs.gitlab.com/ee/user/application_security/sast/#advanced-vulnerability-tracking) feature, which identifies and deduplicates vulnerabilities in a constantly changing code base.\n\n[Advanced Vulnerability Tracking](https://docs.gitlab.com/ee/user/application_security/sast/#advanced-vulnerability-tracking) leverages contextual information provided by generated syntax-trees to scope vulnerabilities and generates location fingerprints for vulnerabilities that are less fragile across code changes in comparison to other tracking methods.\n\nIn a recent study, we demonstrated that our vulnerability tracking approach was 30% more effective than traditional, line-based vulnerability tracking where `\u003Cfile, line number>` are used to fingerprint vulnerabilities. This means that advanced vulnerability tracking reduces the manual effort of auditing vulnerabilities by 30%. In addition, our study suggested that the positive effect of our vulnerability tracking method increases over time.\n\nThe preprint of our study \"[A scalable, effective and simple Vulnerability Tracking approach for heterogeneous SAST setups based on Scope+Offset](https://about.gitlab.com/resources/downloads/icse25-preprint.pdf)\" will be presented at the [47th International Conference on Software Engineering (Software Engineering in Practice Track) 2025](https://conf.researchr.org/home/icse-2025).\n\n*[Lucas Charles](https://gitlab.com/theoretick), [Jason Leasure](https://gitlab.com/jleasure), and [Hua Yan](https://gitlab.com/hyan3) contributed to this article and study.*",[693,2887,695,9],"security research",{"slug":2889,"featured":6,"template":698},"introducing-gitlab-advanced-vulnerability-tracking","content:en-us:blog:introducing-gitlab-advanced-vulnerability-tracking.yml","Introducing Gitlab Advanced Vulnerability 
Tracking","en-us/blog/introducing-gitlab-advanced-vulnerability-tracking.yml","en-us/blog/introducing-gitlab-advanced-vulnerability-tracking",{"_path":2895,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2896,"content":2902,"config":2909,"_id":2911,"_type":14,"title":2912,"_source":16,"_file":2913,"_stem":2914,"_extension":19},"/en-us/blog/introducing-gitlab-dedicated-for-government",{"title":2897,"description":2898,"ogTitle":2897,"ogDescription":2898,"noIndex":6,"ogImage":2899,"ogUrl":2900,"ogSiteName":685,"ogType":686,"canonicalUrls":2900,"schema":2901},"Introducing GitLab Dedicated for Government","Learn how our single-tenant SaaS offering, along with our new FedRAMP \"In Process\" designation, will help public sector customers securely advance their modernization objectives.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749667636/Blog/Hero%20Images/Dedicated_Screengrab_1800x945.png","https://about.gitlab.com/blog/introducing-gitlab-dedicated-for-government","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Introducing GitLab Dedicated for Government\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Chris Balane\"},{\"@type\":\"Person\",\"name\":\"Corey Oas\"}],\n        \"datePublished\": \"2024-06-25\",\n      }",{"title":2897,"description":2898,"authors":2903,"heroImage":2899,"date":2906,"body":2907,"category":717,"tags":2908},[2904,2905],"Chris Balane","Corey Oas","2024-06-25","Public sector organizations, as well as companies in highly regulated industries, are transforming software development by adopting modern and efficient cloud-based technologies while safeguarding the security of federal information. Not an easy task. However, with the just-announced GitLab Dedicated for Government offering, we will be providing customers with a FedRAMP-compliant DevSecOps solution through a secure, single-tenant SaaS offering. 
Now [listed on the FedRAMP Marketplace](https://marketplace.fedramp.gov/products/FR2411959145), GitLab Dedicated for Government will provide all of the benefits of an enterprise DevSecOps platform, with an added focus on data residency, isolation, and private networking to help meet compliance needs.\n\n> To learn more about GitLab Dedicated for Government, and how to secure your software supply chain from code to cloud, reach out to our [sales team](mailto:public-sector@gitlab.com).\n\n## Achieving FedRAMP® certification\n\nThe [Federal Risk and Authorization Management Program](https://www.fedramp.gov/), otherwise known as FedRAMP, has become the gold standard in cloud security, not just for the federal government, but for state and local governments, contractors that aspire to work with government agencies, and security-minded organizations. The U.S. government mandates that cloud services for federal agencies meet strict security standards under FedRAMP. This supports the shift from legacy IT to cost-effective, secure, and scalable cloud-based systems. FedRAMP standards are very rigorous. Organizations must undergo a thorough assessment process, implement necessary security controls, conduct regular audits, and ensure continuous monitoring to meet the stringent criteria set by FedRAMP.\n\nGitLab achieved a major milestone, receiving an \"In Process\" designation for [FedRAMP Moderate Impact Level](https://www.fedramp.gov/baselines/#moderate-impact). 
This designation is given to cloud service providers working toward a FedRAMP “Authority to Operate” (ATO) status.\n\n**Note:** GitLab also has a provisional certification through the Texas Risk and Authorization Management Program, or [TX-RAMP](https://dir.texas.gov/resource-library-item/tx-ramp-certified-cloud-products), which allows us to work with Texas state agencies.\n\n## Navigating compliance complexities\n\nAs more public sector organizations move away from costly legacy systems and migrate their mission-critical workloads to the cloud, cloud and multi-cloud adoption will grow significantly. At GitLab, we serve a wide variety of customers in the public sector – from federally funded research and development centers and service providers working on behalf of the government, to some of the largest government agencies – and we know that no single deployment model will serve the needs of all of our customers.\n\nOur customers have told us they need a SaaS offering that provides additional deployment control and data residency to meet stringent compliance requirements. We see this need with large enterprises and companies in regulated industries that are coming under increased scrutiny, facing global internet policy fragmentation, and dealing with the expanding complexity of data governance. GitLab has consistently observed that security is a top priority for organizations and our [2024 Global DevSecOps Survey](https://about.gitlab.com/developer-survey/) showed that this trend continued, with security remaining the primary investment area. 
\n\n## The benefits of GitLab Dedicated for Government\n\nGitLab Dedicated for Government, which aligns to the Cybersecurity and Infrastructure Security Agency's [Secure by Design principles](https://about.gitlab.com/blog/secure-by-design-principles-meet-devsecops-innovation-in-gitlab-17/), can help the public sector and highly regulated industries reduce toolchain complexity, and support data residency and protection, all while being hosted and managed by GitLab.\n\n### 1. Toolchain consolidation\nToolchain management continues to be an area where DevSecOps teams are feeling the pressure. Many organizations pay for numerous cybersecurity tools that only serve a single purpose, resulting in a surplus of unused or forgotten products and services. According to our [2024 Global DevSecOps Survey](https://about.gitlab.com/developer-survey/), 64% of survey respondents expressed the need to consolidate their toolchains. Security professionals in particular reported using a lot of tools — 63% of security respondents said they use six or more tools. The result can be unnecessary spend, and added complexities and vulnerabilities, putting organizations at a higher risk of cyber attacks. GitLab Dedicated for Government unites DevSecOps teams in a single platform with a single workflow without the need to buy or maintain other tools. By consolidating complex toolchains, organizations can strengthen security and improve process and operational efficiency.\n\n### 2. Data residency and protection \nGitLab Dedicated for Government is built on top of a FedRAMP-authorized infrastructure, which meets U.S. data sovereignty requirements, including access that is restricted to U.S. citizens. \n\nTo help further protect customer data, GitLab Dedicated for Government supports a secure, private connection between the customer’s virtual private cloud network and GitLab. 
Therefore, users, data, and services have secure access to the isolated instance without exposing services directly to the internet.\n\n### 3. Managed and hosted by GitLab\nGitLab Dedicated for Government is not only single-tenant (physical isolation between other customers), U.S.-based, and privately connected, but it’s also managed and hosted by GitLab. Organizations can quickly realize the value of a DevSecOps platform, including the advanced flexibility of a self-managed instance, but without requiring staff to build out and manage infrastructure. Organizations get all of the benefits of GitLab — shorter cycle times, lower costs, stronger security, and more productive developers — with lower total cost of ownership and quicker time-to-value than self-hosting. \n\n## How to get started with GitLab Dedicated for Government\nGitLab Dedicated for Government will bring more flexibility and greater choice to the [public sector](https://about.gitlab.com/solutions/public-sector/) and organizations in highly regulated industries that have complex compliance and data residency requirements. The offering will provide the efficiencies of the cloud, but with infrastructure-level isolation and data residency controls. 
To learn more about GitLab Dedicated for Government, and how to secure your software supply chain from code to cloud, reach out to our [sales team](https://about.gitlab.com/sales/).\n",[186,693,1133,9,717],{"slug":2910,"featured":91,"template":698},"introducing-gitlab-dedicated-for-government","content:en-us:blog:introducing-gitlab-dedicated-for-government.yml","Introducing Gitlab Dedicated For Government","en-us/blog/introducing-gitlab-dedicated-for-government.yml","en-us/blog/introducing-gitlab-dedicated-for-government",{"_path":2916,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2917,"content":2923,"config":2928,"_id":2930,"_type":14,"title":2931,"_source":16,"_file":2932,"_stem":2933,"_extension":19},"/en-us/blog/introducing-gitlabs-new-planner-role-for-agile-planning-teams",{"title":2918,"description":2919,"ogTitle":2918,"ogDescription":2919,"noIndex":6,"ogImage":2920,"ogUrl":2921,"ogSiteName":685,"ogType":686,"canonicalUrls":2921,"schema":2922},"Introducing GitLab’s new Planner role for Agile planning teams","Learn how GitLab’s new Planner role helps Agile teams manage planning workflows, with tailored access across SaaS, Dedicated, and Self-managed solutions.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662488/Blog/Hero%20Images/blog-image-template-1800x945__3_.png","https://about.gitlab.com/blog/introducing-gitlabs-new-planner-role-for-agile-planning-teams","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Introducing GitLab’s new Planner role for Agile planning teams\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Amanda Rueda\"}],\n        \"datePublished\": \"2024-11-25\",\n      }",{"title":2918,"description":2919,"authors":2924,"heroImage":2920,"date":2925,"body":2926,"category":1053,"tags":2927},[1050],"2024-11-25","GitLab launched a new role within the DevSecOps platform – the Planner. 
Built to align with GitLab’s strategy of providing flexible, role-based access controls, as demonstrated by the release of [custom roles](https://docs.gitlab.com/ee/user/custom_roles.html), the Planner role gives software development teams and planning-focused users access to the tools they need to manage Agile workflows without over-provisioning permissions that could introduce unnecessary risks. By tailoring access to meet specific user needs, the Planner role ensures teams can stay productive while maintaining security and compliance, adhering to the [principle of least privilege](https://about.gitlab.com/blog/the-ultimate-guide-to-least-privilege-access-with-gitlab/).\n\n## Why we created the Planner role\n\nOur journey to this new role started with feedback from our customers and internal teams. We consistently heard that while GitLab offers comprehensive tools for planning and managing Agile development cycles, there was a need for more specific role-based access controls. Product managers, project leads, and other planning roles often required access to planning features but didn’t need full development permissions. In fact, giving them broader access is undesirable, as it increases security risks and potential for errors, such as making unintended changes to code or sensitive configurations. We listened.\n\nThrough user interviews, competitive analysis, and extensive research, we validated the need for a role that grants full access to planning tools while maintaining security by restricting access to developer-centric features.\n\n## What does the Planner role offer?\n\nThe Planner role is a hybrid of the existing [Guest and Reporter roles](https://docs.gitlab.com/ee/user/permissions.html#roles) but designed specifically for those who need access to planning workflows. 
\n\nHere’s what you can expect:\n\n* Access to key planning tools like epics, roadmaps, issue boards, and [OKRs](https://docs.gitlab.com/ee/user/okrs.html) (*some features may require a GitLab Premium or Ultimate license*)  \n* Enhanced security by limiting unnecessary access to sensitive development features  \n* The Planner role can be used in conjunction with the Enterprise Agile Planning add-on, providing teams with tailored access to planning tools while maintaining security and control.  (*however, the Planner role itself is available on all license tiers*).\n\nThe Planner role is available across all GitLab solutions, including SaaS, GitLab Dedicated, and Self-managed, ensuring that all customers can benefit from this tailored access.\n\nThis role gives teams the flexibility to align permissions with job functions, creating a balance between accessibility and security.\n\n## How the Planner role supports Agile practices\n\nIn [Agile software development](https://about.gitlab.com/blog/categories/agile-planning/), ensuring that each team member has the right tools and permissions to perform their role is crucial for workflow efficiency. The Planner role supports this by allowing planning team members to fully participate in the planning stages of the software development lifecycle without the risk of overstepping into areas like development or deployment.\n\nFrom creating and managing epics to defining roadmaps, the Planner role gives Agile teams the tools they need to stay aligned and productive.\n\n## Customer-centric design\n\nWe didn’t create this role in isolation. We involved our community in the process every step of the way. 
Through surveys, interviews, and testing, we fine-tuned the permissions to make sure they fit the real-world needs of product and project managers.\n\nThe role also aligns with GitLab’s long-standing mission to be a platform for enterprise Agile teams, giving businesses the flexibility and control to implement Agile methodologies at scale.\n\n## Community feedback and engagement \n\nWe value your input and encourage you to share your experiences with the new Planner role. Your feedback is essential to help refine and improve your GitLab experience. Please visit our [feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/503817) to provide your thoughts and suggestions.\n\n## Start planning with GitLab today!\n\nThe Planner role is just one of the many ways GitLab empowers software development teams to plan, collaborate, and deliver efficiently. Whether you’re looking to streamline your product management workflows, improve team collaboration, or align your Agile practices, GitLab has the tools to help you succeed.\n\n> Ready to experience the full power of GitLab? 
[Sign up for a free GitLab Ultimate trial](https://about.gitlab.com/free-trial/) and start planning your next project with the Planner role, tailored to fit your team's unique needs.\n\n## Read more\n- [Beyond Devs: GitLab Enterprise Agile Planning add-on for all roles](https://about.gitlab.com/blog/gitlab-enterprise-agile-planning-add-on-for-all-roles/)\n- [How to use GitLab for Agile software development](https://about.gitlab.com/blog/gitlab-for-agile-software-development/)\n- [First look: The new Agile planning experience in GitLab](https://about.gitlab.com/blog/first-look-the-new-agile-planning-experience-in-gitlab/)",[908,9,695,717],{"slug":2929,"featured":91,"template":698},"introducing-gitlabs-new-planner-role-for-agile-planning-teams","content:en-us:blog:introducing-gitlabs-new-planner-role-for-agile-planning-teams.yml","Introducing Gitlabs New Planner Role For Agile Planning Teams","en-us/blog/introducing-gitlabs-new-planner-role-for-agile-planning-teams.yml","en-us/blog/introducing-gitlabs-new-planner-role-for-agile-planning-teams",{"_path":2935,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2936,"content":2942,"config":2948,"_id":2950,"_type":14,"title":2951,"_source":16,"_file":2952,"_stem":2953,"_extension":19},"/en-us/blog/its-time-to-put-the-sec-in-devsecops",{"title":2937,"description":2938,"ogTitle":2937,"ogDescription":2938,"noIndex":6,"ogImage":2939,"ogUrl":2940,"ogSiteName":685,"ogType":686,"canonicalUrls":2940,"schema":2941},"It’s time to really put the Sec in DevSecOps","Organizations may tack on security to DevOps but unless they wholly integrate it, they will miss out on DevSecOps benefits.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749671091/Blog/Hero%20Images/lock.jpg","https://about.gitlab.com/blog/its-time-to-put-the-sec-in-devsecops","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"It’s time to really put the Sec in 
DevSecOps\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Francis Ofungwu\"}],\n        \"datePublished\": \"2023-02-02\",\n      }",{"title":2937,"description":2938,"authors":2943,"heroImage":2939,"date":2945,"body":2946,"category":1277,"tags":2947},[2944],"Francis Ofungwu","2023-02-02","\nWe all know that DevOps and security are intertwined. And a lot of lip service is paid to surface integrations between the two. But until your organization goes [all-in on a DevSecOps strategy](/blog/why-the-market-is-moving-to-a-platform-approach-to-devsecops/#understanding-devops-pain-points) – where Sec is wholly embedded with Dev and Ops, you will miss out on the benefits a holistic approach brings.\n\nToday, the friction between DevOps and security teams comes from objectives that, at first glance, seem diametrically opposed (spoiler alert: they aren’t). Developers want to create great products at the velocity the business requires, and security teams want to effectively manage risks using methodical frameworks that require some level of structure. Day-to-day collaboration between the two groups can be challenging because their workflows and incentives differ.\n\nIn [GitLab’s 2022 Global DevSecOps Survey](/developer-survey/), we found that developers are seeing security scanning increasing across all categories (SAST, DAST, container scanning, dependency scanning, and license compliance), but this uplift is not translating into vulnerability reduction, as 56% of respondents said it was difficult to get developers to actually prioritize fixing code.\n\nAnd so they stay in silos.\n\n## Separation between security and DevOps doesn’t work\n\nWe know through our experiences that security and DevOps often only come together in emergencies. When there is a high-risk incident, such as a breach, security and DevOps teams are forced together on endless incident calls that function more like a “get to know you” exercise driven by rudimentary questions: What does that app do? 
Why are you using that library with a vulnerability from 2010? What do you mean it’s not exploitable?\n\nWe can – and should – agree that emergencies are not the best time for this level of discovery. You wouldn’t want a firefighter asking if your building is up to code before they start putting out a fire. But due to the lack of frequent collaboration, development and security teams use incidents as the time to play catchup and really dig into the basics of the development lifecycle.\n\n## Sec is more than just a few letters between Dev and Ops\n\nConfusion in the industry hasn’t helped. The industry has come to recognize – and in some cases, exploit – the frustration of these silos. They will plop the “Sec” in between Dev and Ops and market a laundry list of point solutions that solve only a small portion of the problem, and leave DevOps and security teams with a [complex toolchain](/the-source/platform/devops-teams-want-to-shake-off-diy-toolchains-a-platform-is-the-answer/) to manage and maintain. The alarming rate of cyber attacks and breaches in the headlines makes it obvious this approach is not working. So what’s the issue?\n\nI liken where we are now to the challenges that the healthcare industry faced a decade ago in trying to convince physicians of the benefits of hand hygiene. At the time, in the U.S., healthcare-associated infections affected more than 2 million people every year, while compliance with required hygiene standards by healthcare workers was below 40%, [an article from that time period](https://www.hcinnovationgroup.com/home/blog/13020327/the-freakonomics-of-behavior-change-in-healthcare) states. A Los Angeles hospital, aiming to solve this problem, was requiring a 100% hygiene compliance rate among its physicians – should have been a simple task among a population that understands the poor outcomes related to noncompliant behavior, right? No. 
Several carrot-and-stick approaches to changing behavior of the physicians yielded mixed results.\n\nRelying on humans to change their behavior can be fruitless, the researchers found, according to the article: “Organizations should focus instead on innovations through technology or design.” In other words, we should not rely on behavior change from individuals to drive meaningful, long-lasting transformation. We need to use technology as the invisible hand that reinforces the right behavior and enacts course correction when we deviate from expected actions.\n\nThe corollary is that in the tech industry, we have evangelized for [security and DevOps to be together](https://about.gitlab.com/solutions/security-compliance/) and have talked about why it makes sense (improved software supply chain security, management of threat vectors, and adherence to compliance requirements, for example). When we share the [vision of DevSecOps](/topics/devsecops/) there are head nods and agreements that this unification is the right thing to do for the good of the business, but when it comes down to it, the actual implementation is lacking.\n\n## What it means to be fully DevSecOps\n\nDevSecOps has to be a practice in every sense of the word. It can’t be theory or an academic exercise. DevSecOps should be an implementation of cultural, organizational, and technical changes designed to optimize delivery and maintenance of software. 
Characteristics of DevSecOps will include:\n- Reducing the time required to deliver quality software.\n- Automating processes required to identify, categorize, and remediate software bugs.\n- Designing the culture and operations of dev, sec, and ops and unifying these functions through values and workflows.\n\nFor DevSecOps as a practice to work, all stakeholders involved in the design, development, and maintenance of software need to commit to transparent collaboration at scale.\n\nWhat this means in action:\n\n- Eliminating one-way communication of security requirements: controls should be programmatically enforced and consumable via APIs.\n- Implementing policy as code: For adoption and consistency, the desired cultural shift and expectations have to be programmatically enforced.\n- Creating a unified view of threats at every level of the development lifecycle: All stakeholders should have insight to the same information that details the quality of the code. Having separate security scanners only operated by the security team does not drive collaboration.\n- Supporting in-context training inside of the development process: Build better developers by offering near real time evidence of vulnerabilities in their environment and code.\n- Reducing the amount of time developers spend in painful audits by investing in immutable development artifacts that evidence use of controls throughout the lifecycle.\n\nAt GitLab, we believe we are strongly positioned to accelerate your organization’s DevSecOps transformation. 
Our platform helps unify DevSecOps teams and drive the cultural, process, and governance programs required to deliver value to organizations seeking a more effective and sustainable way to develop better, more secure software faster.\n\nCover image by [Georg Bommeli](https://unsplash.com/@calina?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText) on [Unsplash](https://unsplash.com/photos/ybtUqjybcjE?utm_source=unsplash&utm_medium=referral&utm_content=creditCopyText)\n{: .note}\n",[693,2281,9],{"slug":2949,"featured":6,"template":698},"its-time-to-put-the-sec-in-devsecops","content:en-us:blog:its-time-to-put-the-sec-in-devsecops.yml","Its Time To Put The Sec In Devsecops","en-us/blog/its-time-to-put-the-sec-in-devsecops.yml","en-us/blog/its-time-to-put-the-sec-in-devsecops",{"_path":2955,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2956,"content":2961,"config":2966,"_id":2968,"_type":14,"title":2969,"_source":16,"_file":2970,"_stem":2971,"_extension":19},"/en-us/blog/kingfisher-transforming-the-developer-experience-with-gitlab",{"title":2957,"description":2958,"ogTitle":2957,"ogDescription":2958,"noIndex":6,"ogImage":708,"ogUrl":2959,"ogSiteName":685,"ogType":686,"canonicalUrls":2959,"schema":2960},"Kingfisher transforming the developer experience with GitLab","Learn how the international company focuses on DevSecOps, including automation, to reduce complexity in workflows for better efficiency.","https://about.gitlab.com/blog/kingfisher-transforming-the-developer-experience-with-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Kingfisher transforming the developer experience with GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Sharon Gaudin\"}],\n        \"datePublished\": \"2024-11-12\",\n      
}",{"title":2957,"description":2958,"authors":2962,"heroImage":708,"date":2963,"body":2964,"category":1073,"tags":2965},[1498],"2024-11-12","Kingfisher plc, an international home improvement company, has leaned into GitLab’s end-to-end platform to help it build a DevSecOps foundation that is revolutionizing its developer experience. And the company plans to continue that improvement by increasing its use of platform features, focusing on security, simplifying its toolchain, and increasing the use of automation.\n\n> \u003Cimg align=\"left\" width=\"200\" height=\"200\" hspace=\"5\" vspace=\"5\" alt=\"Chintan Parmar\" src=\"https://res.cloudinary.com/about-gitlab-com/image/upload/v1752176076/Blog/ro7u8p695zw9fllbk4j5.png\" style=\"float: left; margin-right: 25px;\"> “The whole point of this is to reduce friction for our engineers, taking away a lot of the complexity in their workflow, and bringing in best practices and governance,” says Chintan Parmar, site reliability engineering manager at Kingfisher. “In terms of what we've done and what we're doing at the moment, it really is about building a foundation in terms of CI/CD and changing the way we deploy to bring in consistency and improve the developer experience.”\n\nParmar talked about his team and their efforts during the [GitLab DevSecOps World Tour event](https://about.gitlab.com/events/devsecops-world-tour/) in London last month. In an on-stage interview with Sherrod Patching, vice president of Customer Success Management at GitLab, he laid out Kingfisher’s journey with the platform, which is enabling its teams, while also making it easier and faster to move software updates and new projects from ideation to deployment.\n\n[Kingfisher](https://www.kingfisher.com/en/index.html) is a parent company with more than 2,000 stores in eight countries across Europe. 
Listed on the London Stock Exchange and part of the Financial Times Stock Exchange (FTSE) 100 Index, the group reported £13 billion in total revenue in FY 2023/24. Its brands include B&Q, Screwfix, Castorama, and Brico Depot. \n\nThe company first adopted GitLab in 2016, using a free starter license, and then moved to Premium in 2020. In that time, it also has moved from on-premise to a cloud environment, started using shared GitLab runners and source code management, and began building out a CI/CD library that gives team members easy access to standardized and reusable components for typical pipeline stages, such as build, deploy, and test.\n\n## Tracking metrics that execs care about\n\nKingfisher also is tracking metrics, like deployment frequency, lead time to change, and change failure rates, with GitLab. And teams are analyzing value streams, mapping workflows, and finding bottlenecks. All of those metrics are being translated into data that company leaders can sink their teeth into. \n\n“Execs may not care about whether a merge request has been waiting 15 or 20 minutes, but they do care about how we translate that time value into dollars or pounds,” says Parmar, who used GitLab when he previously worked at [Dunelm Group, plc,](https://about.gitlab.com/customers/dunelm/) another major UK-based retailer. “Kingfisher is a very data-driven organization. We are looking to overlay these metrics to see where we can continue to improve our developer experience, eliminating slowdowns and manual tasks, while increasing automation.”\n\nWhile on-stage, Parmar made it clear that all the changes being made are aimed at improving software development and deployment. However, it’s equally paramount to making team members’ jobs easier, giving them more time and autonomy to do the kind of work they enjoy, instead of what can seem like a never-ending stream of repetitive, manual tasks. 
He noted that the team is so focused on easing workflows and giving engineers more time to be innovative, it has created a “developer experience squad.”\n\n## Putting people first while laying out priorities\n\nSo what’s coming next for Kingfisher and its engineering squads, which have about 600 practitioners?  \n\nAccording to Parmar, Kingfisher already has its priorities mapped out. Using GitLab to [move security left](https://about.gitlab.com/solutions/security-compliance/) is at the top of their list. The group also is focused on continuing to reduce its toolchain, and using automation to increase productivity. And he expects that early in 2025, teams will begin “dabbling” with the artificial intelligence capabilities in [GitLab Duo](https://about.gitlab.com/gitlab-duo/), a suite of AI-powered features in the platform that help increase velocity and solve key pain points across the software development lifecycle. Kingfisher will focus on how that can further increase its efficiency and productivity. \n\nTo get all of this done, Parmar says the first step is to ensure that people come first.\n\n“We’re focused on the hearts and minds of our people... and remembering that people can be attached to how they work through pipelines,” he adds. “People have different ways of building their pipelines. We need to understand what they need, what their workflows look like, and then work with them to find the right solution. After, we’ll go back to them with data that shows the improvements worked. So instead of telling them what they need, we find out what that is, and fix what’s slowing them down. That builds a very good rapport with our engineers.”\n\nChanging how a team creates and deploys software is a journey. 
Parmar suggests that collaboratively taking developers and security teams on that journey, instead of dragging them along, makes a big difference in ease of migration and in easing team members’ user experience.\n\n> Learn [how other GitLab customers use the DevSecOps platform](https://about.gitlab.com/customers/) to gain results for customers.\n",[1075,9,951,888],{"slug":2967,"featured":91,"template":698},"kingfisher-transforming-the-developer-experience-with-gitlab","content:en-us:blog:kingfisher-transforming-the-developer-experience-with-gitlab.yml","Kingfisher Transforming The Developer Experience With Gitlab","en-us/blog/kingfisher-transforming-the-developer-experience-with-gitlab.yml","en-us/blog/kingfisher-transforming-the-developer-experience-with-gitlab",{"_path":2973,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2974,"content":2980,"config":2985,"_id":2987,"_type":14,"title":2988,"_source":16,"_file":2989,"_stem":2990,"_extension":19},"/en-us/blog/last-year-we-signed-the-secure-by-design-pledge-heres-our-progress",{"title":2975,"description":2976,"ogTitle":2975,"ogDescription":2976,"noIndex":6,"ogImage":2977,"ogUrl":2978,"ogSiteName":685,"ogType":686,"canonicalUrls":2978,"schema":2979},"Last year we signed the Secure by Design pledge - here's our progress","Learn about GitLab's CISA-aligned additions and improvements around MFA, default password reduction, patching, and vulnerability disclosure.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659684/Blog/Hero%20Images/AdobeStock_479904468__1_.jpg","https://about.gitlab.com/blog/last-year-we-signed-the-secure-by-design-pledge-heres-our-progress","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Last year we signed the Secure by Design pledge - here's our progress\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Joseph Longo\"}],\n        \"datePublished\": \"2025-06-09\",\n      
}",{"title":2975,"description":2976,"authors":2981,"heroImage":2977,"date":2982,"body":2983,"category":693,"tags":2984},[2412],"2025-06-09","A little over a year go, GitLab signed [CISA’s Secure by Design Pledge](https://about.gitlab.com/blog/secure-by-design-principles-meet-devsecops-innovation-in-gitlab-17/), a directive for technology providers to embed security at the heart of their products from the outset of development. Since then, we've made significant progress towards improving our security posture and creating a more secure ecosystem for our customers to develop secure software faster.\n\n## Meeting the security goals\n\nLet’s explore the additions and improvements we've made to further enhance security across the development lifecycle.\n\n### Multi-factor authentication (MFA)\n\n***Goal: Within one year of signing the pledge, demonstrate actions taken to measurably increase the use of multi-factor authentication across the manufacturer’s products.***\n\nGitLab currently offers multiple [MFA](https://docs.gitlab.com/ee/user/profile/account/two_factor_authentication.html) options for users to secure their accounts. We also offer SSO functionality to enable [GitLab.com](https://docs.gitlab.com/ee/user/group/saml_sso/), [Self-Managed](https://docs.gitlab.com/integration/saml/), and [GitLab Dedicated](https://docs.gitlab.com/integration/saml/) customers to streamline their authentication processes and their internal MFA requirements.\n\nTo further enhance the platform’s resilience, and to create a more secure foundation for our customers, GitLab is executing a phased MFA by Default rollout.\n\nIn the coming months, we will deploy changes requiring all customers to enable MFA on their accounts. \n\nFor customers who already have MFA enabled or authenticate to GitLab via their organization’s single sign-on (SSO) method, there will be no necessary changes. 
For customers who do not already have MFA enabled and are not authenticating to GitLab via their organization’s SSO method, they will be required to enable MFA and enroll in one or more of the available MFA methods.\n\nThe MFA rollout will occur in stages to ensure a smooth and consistent adoption across all customers. More details on GitLab’s MFA by Default rollout will be shared in the near future.\n\n### Default passwords\n\n***Goal: Within one year of signing the pledge, demonstrate measurable progress towards reducing default passwords across the manufacturers’ products.***\n\nTo reduce the use of default passwords, GitLab uses randomly generated root passwords for its multiple installation methods. GitLab’s multi-method [installation instructions](https://docs.gitlab.com/ee/install/install_methods.html) also include guidance on how to change the randomly generated root password for each installation.\n\nFor some install methods, such as installing GitLab in a Docker container, the password file with the initial root password is deleted in the first container restart after 24 hours to help further harden the GitLab instance.\n\n### Reducing entire classes of vulnerabilities\n\n***Goal: Within one year of signing the pledge, demonstrate actions taken towards enabling a significant measurable reduction in the prevalence of one or more vulnerability classes across the manufacturer’s products.***\n\nGitLab has published [secure coding guidelines](https://docs.gitlab.com/ee/development/secure_coding_guidelines.html#sast-coverage) to its documentation site that contains descriptions and guidelines for addressing security vulnerabilities commonly identified in the GitLab codebase. 
\n\nThe guidelines are “intended to help developers identify potential security vulnerabilities early, with the goal of reducing the number of vulnerabilities released over time.” \n\nGitLab continues to improve its [SAST rule coverage](https://docs.gitlab.com/development/secure_coding_guidelines#sast-coverage) to address broader sets of security vulnerabilities for itself and its customers.\n\n### Security patches\n\n***Goal: Within one year of signing the pledge, demonstrate actions taken to measurably increase the installation of security patches by customers.***\n\nGitLab handles all updates related to its GitLab.com and GitLab Dedicated service offerings. Additionally, GitLab publishes a [maintenance policy](https://docs.gitlab.com/ee/policy/maintenance.html), which outlines its approach to releasing updates, backporting, upgrade recommendations and supporting documentation, etc.\n\nGitLab’s documentation has comprehensive guidance on [how to upgrade](https://docs.gitlab.com/ee/update/?tab=Self-compiled+%28source%29#upgrade-based-on-installation-method) self-managed instances based on their deployment model. 
This includes Omnibus, Helm chart, Docker and self-compiled GitLab installations.\n\nGitLab also provides a detailed [upgrade plan](https://docs.gitlab.com/ee/update/plan_your_upgrade.html) to ensure proper testing and troubleshooting can be performed as well as rollback plans if necessary.\n\nDepending on the version upgrade, specific changes ([example for GitLab 17](https://docs.gitlab.com/ee/update/versions/gitlab_17_changes.html)) for each version are highlighted to ensure a smooth upgrade process and limit unavailability of services.\n\n### Vulnerability disclosure policy\n\n***Goal: Within one year of signing the pledge, publish a vulnerability disclosure policy (VDP).***\n\nGitLab maintains a strong bug bounty program through [HackerOne](https://hackerone.com/gitlab?type=team), a [security.txt](https://gitlab.com/.well-known/security.txt) file highlighting GitLab’s preferred and additional disclosure processes, and [release posts](https://about.gitlab.com/releases/categories/releases/) highlighting security fixes.\n\nCustomers and the general public can subscribe to receive GitLab’s release posts directly in their email inbox.\n\n### Common vulnerability enumerations \n\n***Goal: Within one year of signing the pledge, demonstrate transparency in vulnerability reporting***\n\nGitLab includes the Common Weakness Enumeration (CWE) field in all Common vulnerability enumerations (CVE) records it publishes. 
Over the past year, GitLab has iterated to also include the Common Platform Enumeration (CPE) field in CVE records.\n\nThe GitLab [CVE assignments project](https://gitlab.com/gitlab-org/cves) stores a copy of all CVE identifiers assigned and published by GitLab in its role as a CVE Numbering Authority.\n\n> Check out [GitLab’s CVE submission template](https://gitlab.com/gitlab-org/cves/-/blob/master/.gitlab/issue_templates/Internal%20GitLab%20Submission.md?ref_type=heads).\n\n### Evidence of intrusions\n\n***Goal: Within one year of signing the pledge, demonstrate a measurable increase in the ability for customers to gather evidence of cybersecurity intrusions affecting the manufacturer’s products.***\n\nGitLab has published an [incident response guide](https://docs.gitlab.com/ee/security/responding_to_security_incidents.html) to help customers respond to incidents involving GitLab instances. Additionally, GitLab has open sourced versions of its [GUARD detection-as-code](https://about.gitlab.com/blog/unveiling-the-guard-framework-to-automate-security-detections-at-gitlab/) and TLDR threat detection frameworks. 
The repositories for those open source frameworks can be found on [GitLab’s Open Source Security Center](https://about.gitlab.com/security/open-source-resources/).\n\nIn a similar manner, GitLab is adding functionality to its [GitLab.com](http://gitLab.com) service offering to [detect compromised passwords](https://about.gitlab.com/blog/introducing-compromised-password-detection-for-gitlab-com/) for all logins using GitLab’s native username and password authentication method.\n\n## What's next\n\n[GitLab’s Security Division’s mission](https://gitlab.com/gitlab-com/gl-security) is to enable everyone to innovate and succeed on a safe, secure, and trusted DevSecOps platform.\n\nGitLab's security enhancements over the past year have allowed us to demonstrate our commitment to CISA’s Secure by Design Pledge, and they have strengthened our platform and given customers a more reliable and secure foundation to build on. \n\nOur commitment to iteration means we're already focused on the next set of innovations that will drive us forward.\n\n> To learn more about GitLab’s security enhancements, bookmark our [security page on the GitLab Blog](https://about.gitlab.com/blog/categories/security/).\n\n## Read more  \n- [Secure by Design principles meet DevSecOps innovation in GitLab 17](https://about.gitlab.com/blog/secure-by-design-principles-meet-devsecops-innovation-in-gitlab-17/)\n- [Happy birthday, Secure by Design!](https://about.gitlab.com/blog/happy-birthday-secure-by-design/)\n- [Strengthen your cybersecurity strategy with Secure by Design](https://about.gitlab.com/the-source/security/strengthen-your-cybersecurity-strategy-with-secure-by-design/)",[951,9,693,186],{"slug":2986,"featured":91,"template":698},"last-year-we-signed-the-secure-by-design-pledge-heres-our-progress","content:en-us:blog:last-year-we-signed-the-secure-by-design-pledge-heres-our-progress.yml","Last Year We Signed The Secure By Design Pledge Heres Our 
Progress","en-us/blog/last-year-we-signed-the-secure-by-design-pledge-heres-our-progress.yml","en-us/blog/last-year-we-signed-the-secure-by-design-pledge-heres-our-progress",{"_path":2992,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":2993,"content":2999,"config":3004,"_id":3006,"_type":14,"title":3007,"_source":16,"_file":3008,"_stem":3009,"_extension":19},"/en-us/blog/learning-python-with-a-little-help-from-ai-code-suggestions",{"title":2994,"description":2995,"ogTitle":2994,"ogDescription":2995,"noIndex":6,"ogImage":2996,"ogUrl":2997,"ogSiteName":685,"ogType":686,"canonicalUrls":2997,"schema":2998},"Learning Python with a little help from AI","Use this guided tutorial, along with GitLab Duo Code Suggestions, to learn a new programming language.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663918/Blog/Hero%20Images/aipower.jpg","https://about.gitlab.com/blog/learning-python-with-a-little-help-from-ai-code-suggestions","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Learning Python with a little help from AI\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Michael Friedrich\"}],\n        \"datePublished\": \"2023-11-09\",\n      }",{"title":2994,"description":2995,"authors":3000,"heroImage":2996,"date":3001,"body":3002,"category":801,"tags":3003},[1379],"2023-11-09","Learning a new programming language can help broaden your software\ndevelopment expertise, open career opportunities, or create fun challenges.\nHowever, it can be difficult to decide on one specific approach to learning\na new language. Artificial intelligence (AI) can help. 
In this tutorial,\nyou'll learn how to leverage AI-powered GitLab Duo Code Suggestions for a\nguided experience in learning the Python programming language with a\npractical hands-on example.\n\n\n- [Preparations](#preparations)\n  - [VS Code](#vs-code)\n  - [Code Suggestions](#code-suggestions)\n- [Learning a new programming language:\nPython](#learning-a-new-programming-language-python)\n    - [Development environment for Python](#development-environment-for-python)\n    - [Hello, World](#hello-world)\n- [Start learning Python with a practical\nexample](#start-learning-python-with-a-practical-example)\n    - [Define variables and print them](#define-variables-and-print-them)\n    - [Explore variable types](#explore-variable-types)\n- [File I/O: Read and print a log file](#file-io-read-and-print-a-log-file)\n\n- [Flow control](#flow-control)\n    - [Loops and lists to collect files](#loops-and-lists-to-collect-files)\n    - [Conditionally collect files](#conditionally-collect-files)\n- [Functions](#functions)\n    - [Start with a simple log format](#start-with-a-simple-log-format)\n    - [String and data structure operations](#string-and-data-structure-operations)\n    - [Parse log files using regular expressions](#parse-log-files-using-regular-expressions)\n    - [Advanced log format: auth.log](#advanced-log-format-authlog)\n    - [Parsing more types: Structured logging](#parsing-more-types-structured-logging)\n- [Printing results and formatting](#printing-results-and-formatting)\n\n- [Dependency management and continuous\nverification](#dependency-management-and-continuous-verification)\n    - [Pip and pyenv: Bringing structure into Python](#pip-and-pyenv-bringing-structure-into-python)\n    - [Automation: Configure CI/CD pipeline for Python](#automation-configure-cicd-pipeline-for-python)\n- [What is next](#what-is-next)\n    - [Async learning exercises](#async-learning-exercises)\n    - [Share your feedback](#share-your-feedback)\n\n## Preparations 
\n\n\nChoose your [preferred and supported\nIDE](https://docs.gitlab.com/ee/user/project/repository/code_suggestions.html#enable-code-suggestions-in-other-ides-and-editors),\nand follow the documentation to enable Code Suggestions for [GitLab.com\nSaaS](https://docs.gitlab.com/ee/user/project/repository/code_suggestions.html#enable-code-suggestions-on-gitlab-saas)\nor [GitLab self-managed\ninstances](https://docs.gitlab.com/ee/user/project/repository/code_suggestions.html#enable-code-suggestions-on-self-managed-gitlab).\n\n\nProgramming languages can require installing the language interpreter\ncommand-line tools or compilers that generate binaries from source code to\nbuild and run the application.\n\n\n**Tip:** You can also use [GitLab Remote Development\nworkspaces](/blog/quick-start-guide-for-gitlab-workspaces/) to\ncreate your own cloud development environments, instead of local development\nenvironments. This blog post focuses on using VS Code and the GitLab Web\nIDE. \n\n\n### VS Code\n\n\n[Install VS Code](https://code.visualstudio.com/download) on your client,\nand open it. Navigate to the `Extensions` menu and search for `gitlab\nworkflow`. Install the [GitLab Workflow extension for VS\nCode](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow).\nVS Code will also detect the programming languages, and offer to install\nadditional plugins for syntax highlighting and development experience. For\nexample, install the [Python\nextension](https://marketplace.visualstudio.com/items?itemName=ms-python.python).\n\n\n### Code Suggestions\n\n\nFamiliarize yourself with suggestions before actually verifying the\nsuggestions. GitLab Duo Code Suggestions are provided as you type, so you do\nnot need use specific keyboard shortcuts. To accept a code suggestion, press\nthe `tab` key. Also note that writing new code works more reliably than\nrefactoring existing code. 
AI is non-deterministic, which means that the\nsame suggestion may not be repeated after deleting the code suggestion.\nWhile Code Suggestions is in Beta, we are working on improving the accuracy\nof generated content overall. Please review the [known\nlimitations](https://docs.gitlab.com/ee/user/project/repository/code_suggestions.html#known-limitations),\nas this could affect your learning experience.\n\n\n**Tip:** The latest release of Code Suggestions supports multiline\ninstructions. You can refine the specifications to your needs to get better\nsuggestions. We will practice this method throughout the blog post.\n\n\n## Learning a new programming language: Python  \n\n\nNow, let's dig into learning Python, which is one of the [supported\nlanguages in Code\nSuggestions](https://docs.gitlab.com/ee/user/project/repository/code_suggestions.html#supported-languages). \n\n\nBefore diving into the source code, make sure to set up your development\nenvironment.\n\n\n### Development environment for Python \n\n\n1) Create a new project `learn-python-ai` in GitLab, and clone the project\ninto your development environment. All code snippets are available in this\n[\"Learn Python with AI\"\nproject](https://gitlab.com/gitlab-de/use-cases/ai/learn-with-ai/learn-python-ai).\n\n\n```shell\n\ngit clone https://gitlab.com/NAMESPACE/learn-python-ai.git\n\n\ncd learn-python-ai\n\n\ngit status\n\n```\n\n\n2) Install Python and the build toolchain. Example on macOS using Homebrew:\n\n\n```\n\nbrew install python\n\n```\n\n\n3) Consider adding a `.gitignore` file for Python, for example this\n[.gitignore template for\nPython](https://gitlab.com/gitlab-org/gitlab/-/blob/master/vendor/gitignore/Python.gitignore?ref_type=heads). \n\n\nYou are all set to learn Python! 
\n\n\n### Hello, World\n\n\nStart your learning journey in the [official\ndocumentation](https://www.python.org/about/gettingstarted/), and review the\nlinked resources, for example, the [Python\ntutorial](https://docs.python.org/3/tutorial/index.html). The\n[library](https://docs.python.org/3/library/index.html) and [language\nreference](https://docs.python.org/3/reference/index.html) documentation can\nbe helpful, too. \n\n\n**Tip:** When I touched base with Python in 2005, I did not have many use\ncases except as a framework to test Windows 2000 drivers. Later, in 2016, I\nrefreshed my knowledge with the book \"Head First Python, 2nd Edition,\"\nproviding great practical examples for the best learning experience – two\nweeks later, I could explain the differences between Python 2 and 3. You do\nnot need to worry about Python 2 – it has been deprecated some years ago,\nand we will focus only on Python 3 in this blog post. In August 2023, \"[Head\nFirst Python, 3rd\nEdition](https://www.oreilly.com/library/view/head-first-python/9781492051282/)\"\nwas published. The book provides a great learning resource, along with the\nexercises shared in this blog post. \n\n\nCreate a new file `hello.py` in the root directory of the project and start\nwith a comment saying `# Hello world`. Review and accept the suggestion by\npressing the `tab` key and save the file (keyboard shortcut: cmd s). \n\n\n```\n\n# Hello world\n\n```\n\n\nCommit the change to the Git repository. In VS Code, use the keyboard\nshortcut `ctrl shift G`, add a commit message, and hit `cmd enter` to\nsubmit. \n\n\nUse the command palette (`cmd shift p`) and search for `create terminal` to\nopen a new terminal. Run the code with the Python interpreter. 
On macOS, the\nbinary from Homebrew is called `python3`, other operating systems and\ndistributions might use `python` without the version.\n\n\n```shell\n\npython3 hello.py\n\n```\n\n\n![Hello World, hello GitLab Duo Code\nSuggestions](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_python_code_sugestions_hello_world.png)\n\n\n**Tip:** Adding code comments in Python starting with the `#` character\nbefore you start writing a function or algorithm will help Code Suggestions\nwith more context to provide better suggestions. In the example above, we\ndid that with `# Hello world`, and will continue doing so in the next\nexercises.\n\n\nAdd `hello.py` to Git, commit all changes and push them to your GitLab\nproject.\n\n\n```shell\n\ngit add hello.py\n\n\ngit commit -avm \"Initialize Python\"\n\n\ngit push\n\n```\n\n\nThe source code for all exercises in this blog post is available in this\n[\"Learn Python with AI\"\nproject](https://gitlab.com/gitlab-de/use-cases/ai/learn-with-ai/learn-python-ai).\n\n\n## Start learning Python with a practical example \n\n\nThe learning goal in the following sections involves diving into the\nlanguage datatypes, variables, flow control, and functions. We will also\nlook into file operations, string parsing, and data structure operations for\nprinting the results. The exercises will help build a command-line\napplication that reads different log formats, works with the data, and\nprovides a summary. 
This will be the foundation for future projects that\nfetch logs from REST APIs, and inspire more ideas such as rendering images,\ncreating a web server, or adding Observability metrics.\n\n\n![Parsing log files into structured objects, example result after following\nthe\nexercises](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_terminal_parsing_logs_and_pretty_print_results.png)\n\n\nAs an experienced admin, you can put the script into production and use\nreal-world log format exmples. Parsing and analyzing logs in stressful\nproduction incidents can be time-consuming. A local CLI tool is sometimes\nfaster than a log management tool.\n\n\nLet's get started: Create a new file called `log_reader.py` in the directory\nroot, add it to Git, and create a Git commit.\n\n\n### Define variables and print them\n\n\nAs a first step, we need to define the log files location, and the expected\nfile suffix. Therefore, let's create two variables and print them. Actually,\nask Code Suggestions to do that for you by writing only the code comments\nand accepting the suggestions. Sometimes, you need to experiment with\nsuggestions and delete already accepted code blocks. 
Do not worry – the\nquality of the suggestions will improve over time as the model generates\nbetter suggestions with more context.\n\n\n![Define log path and file suffix\nvariables](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_code_suggestions_log_reader_variables_01.png){:\n.shadow}\n\n\n![Print the variables to\nverify](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_code_suggestions_log_reader_variables_02.png){:\n.shadow}\n\n\n```python\n\n# Specify the path and file suffix in variables\n\npath = '/var/log/'\n\nfile_suffix = '.log'\n\n\n# Print the variables \n\n\nprint(path)\n\nprint(file_suffix)\n\n```\n\n\nNavigate into the VS Code terminal and run the Python script:\n\n\n```shell\n\npython3 log_reader.py\n\n```\n\n\n![VS Code terminal, printing the\nvariables](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_terminal_print_variables.png)\n\n\nPython supports many different types in the [standard\nlibrary](https://docs.python.org/3/library/index.html). Most common types\nare: Numeric (int, float, complex), Boolean (True, False), and String (str).\nData structures include support for lists, tuples, and dictionaries. \n\n\n### Explore variable types \n\n\nTo practice different variable types, let's define a limit of log files to\nread as a variable with the `integer` type.\n\n\n![Log file\nvariable](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_code_suggestions_log_reader_variables_03.png){:\n.shadow}\n\n\n```python\n\n# Define log file limit variable \n\nlog_file_limit = 1024 \n\n```\n\n\nCreate a Boolean variable that forces to read all files in the directory, no\nmatter the log file suffix. 
\n\n\n```python\n\n# Define boolean variable whether to read all files recursively\n\nread_all_files_recursively = True\n\n```\n\n\n## File I/O: Read and print a log file\n\n\nCreate a directory called `log-data` in your project tree. You can copy all\nfile examples from the [log-data directory in the example\nproject](https://gitlab.com/gitlab-de/use-cases/ai/learn-with-ai/learn-python-ai/-/tree/main/log-data?ref_type=heads).  \n\n\nCreate a new file `sample.log` with the following content, or any other two\nlines that provide a different message at the end.\n\n\n```\n\nOct 17 00:00:04 ebpf-chaos systemd[1]: dpkg-db-backup.service: Deactivated\nsuccessfully.\n\nOct 17 00:00:04 ebpf-chaos systemd[1]: Finished Daily dpkg database backup\nservice.\n\n```\n\n\nInstruct Code Suggestions to read the file `log-data/sample.log` and print\nthe content. \n\n\n![Code Suggestions: Read log file and print\nit](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_code_suggestions_read_log_file_and_print.png){:\n.shadow}\n\n\n```python\n\n# Read the file in log-data/sample.log and print its content\n\nwith open('log-data/sample.log', 'r') as f:\n    print(f.read())\n```\n\n\n**Tip:** You will notice the indent here. The `with open() as f:` statement\nopens a new scope where `f` is available as stream. This flow requires\nindenting )`tab`) the code block, and perform actions in this scope, calling\n`f.read()` to read the file contents, and passing the immediate value as\nparameter into the `print()` function.\n\n\nNavigate into the terminal, and run the script again with `python3\nlog_reader.py`. 
You will see the file content shown in the VS Code editor,\nalso printed into the terminal.\n\n\n![VS Code terminal: Read log file, and print\nit](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_terminal_print_logfile_show_file_sample.png)\n\n\n## Flow control \n\n\nReading one log file is not enough – we want to analyze all files in a given\ndirectory recursively. For the next exercise, we instruct Code Suggestions\nto create an index of all files. \n\n\nPrepare the `log-data` directory with more example files from the [log-data\ndirectory in the example\nproject](https://gitlab.com/gitlab-de/use-cases/ai/learn-with-ai/learn-python-ai/-/tree/main/log-data?ref_type=heads).\nThe directory tree should look as follows:\n\n\n```shell\n\ntree log-data                                                             ─╯\n\nlog-data\n\n├── sample.log\n\n└── var\n    └── log\n        ├── auth.log\n        ├── syslog.log\n        └── syslog_structured.log\n\n3 directories, 4 files\n\n```\n\n\n### Loops and lists to collect files \n\n\nModify the `path` variable to use the value `log-data/`. \n\n\n```python\n\n# Specify the path and file suffix in variables\n\npath = 'log-data/'\n\nfile_suffix = '.log'\n\n```\n\n\nTell Code Suggestions to read all file paths in the directory into a list.\nAfter the collection loop, print the list of file paths. 
\n\n\n```python\n\n# Read all file paths in the directory into a list\n\n\n# Print the list of log file paths\n\n```\n\n\n![Code Suggestion, collect file\npaths](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_code_suggestions_collect_files.png){:\n.shadow}\n\n\nAn example of a suggestion could look like this:\n\n\n```python\n\n# Read all file paths in the directory into a list\n\nimport os\n\n\n# Read all file paths in the directory into a list\n\nlog_files = []\n\nfor root, directories, files in os.walk(path):\n    for file in files:\n        if file.endswith(file_suffix):\n            log_files.append(os.path.join(root, file))\n\n# Print the list of log file paths\n\n\nprint(log_files)\n\n```\n\n\nLet's analyze what happens here: The `import os` statement is required to\nbring the `os` library into the current scope, and we are able to call\n`os.walk()` later. The `log_files`\n[list](https://docs.python.org/3/library/stdtypes.html#sequence-types-list-tuple-range)\nis initialized as an empty list using empty brackets. `os.walk()` expects a\npath as directory tree to start searching for files. The function returns a\ngenerator object, which will walk the directory tree recursively. The\nreturned tuples include the collected files, which we want to access in a\nloop iteraor, using the `for` loop notation. When the file ends with the\ndefined suffix, its full path is appended to the list. This is an optimized\nand complex example. Do not worry about understanding everything just yet -\nwe will revisit this later. \n\n\n### Conditionally collect files \n\n\nModify the instructions and add the Boolean flag evaluation for\n`read_all_files_recursively` as an input to the glob() function. 
\n\n\n```python\n\n# Read all file paths in the top level directory into a list\n\n# If read_all_files_recursively is True, read all files recursively\n\n```\n\n\n![Code Suggestions, read files recursively\nconditionally](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_code_suggestions_read_files_recursively_conditionally.png){:\n.shadow}\n\n\n```\n\n# Read all file paths in the top level directory \n\nimport os\n\n\n# Read all file paths in the top level directory into a list\n\n# If read_all_files_recursively is True, read all files recursively\n\n\nlog_files = []\n\nfor file in os.listdir(path):\n    if file.endswith(file_suffix):\n        log_files.append(os.path.join(path, file))\n\nif read_all_files_recursively:\n    for root, directories, files in os.walk(path):\n        for file in files:\n            if file.endswith(file_suffix):               \n                log_files.append(os.path.join(root, file))\n\n# Print log_files\n\nprint(log_files)\n\n```\n\n\nThe result is not optimal yet because it always executes the first loop, and\noptionally the second loop. This flow leads to duplicated results when the\nscript is executed.\n\n\n```\n\npython3 log_reader.py\n\n\n['log-data/sample.log', 'log-data/sample.log', 'log-data/var/log/auth.log']\n\n```\n\n\nExperiment with Code Suggestions instructions to get a solution for the\nproblem. There are different approaches you can take: \n\n\n1) A potential solution is to wrap the source code into an if-then-else\nblock, and move the `os.listdir()` loop into the else-block. 
\n\n\n```python\n\nif read_all_files_recursively:\n    for root, directories, files in os.walk(path):\n        for file in files:\n            if file.endswith(file_suffix):               \n                log_files.append(os.path.join(root, file))\nelse:\n    for file in os.listdir(path):\n        if file.endswith(file_suffix):\n            log_files.append(os.path.join(path, file))  \n\n```\n\n\n2) Alternatively, do not use `append()` to always add a new list entry, but\ncheck if the item exists in the list first. \n\n\n```python\n\nfor file in os.listdir(path):\n    if file.endswith(file_suffix):\n        # check if the entry exists in the list already\n        if os.path.isfile(os.path.join(path, file)):\n            log_files.append(os.path.join(path, file))\n\nif read_all_files_recursively:\n    for root, directories, files in os.walk(path):\n        for file in files:\n            if file.endswith(file_suffix):\n                # check if the entry exists in the list already\n                if file not in log_files:\n                    log_files.append(os.path.join(root, file))\n```\n\n\n3) Or, we could eliminate duplicate entries after collecting all items.\nPython allows converting lists into\n[sets](https://docs.python.org/3/library/stdtypes.html#set-types-set-frozenset),\nwhich hold unique entries. After applying `set()`, you can again convert the\nset back into a list. Code Suggestions knows about this possibility, and\nwill help with the comment `# Ensure that only unique file paths are in the\nlist` \n\n\n![Code Suggestions, converting a list to unique\nitems](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_code_suggestions_unique_list.png){:\n.shadow}\n\n\n```python\n\n# Ensure that only unique file paths are in the list\n\n\nlog_files = list(set(log_files))\n\n```\n\n\n4) Take a step back and evaluate whether the variable\nread_all_files_recursively makes sense. 
Maybe the default behavior should\njust be reading all files recursively?\n\n\n**Tip for testing different paths in VS Code:** Select the code blocks, and\npress [`cmd /` on\nmacOS](https://code.visualstudio.com/docs/getstarted/keybindings) to comment\nout the code. \n\n\n## Functions \n\n\nLet's create a function called `parse_log_file` that parses a log file, and\nreturns the extracted data. We will define the expected log format and\ncolumns to extract, following the [syslog format\nspecification](https://en.wikipedia.org/wiki/Syslog). There are different\nlog format types and also customized formats by developers that need to be\ntaken into account – exercise for later. \n\n\n### Start with a simple log format \n\n\nInspect a running Linux VM, or use the following example log file example\nfor additional implementation.\n\n\n```\n\nless /var/log/syslog | grep -v docker \n\n\nOct 17 00:00:04 ebpf-chaos systemd[1]: Starting Daily dpkg database backup\nservice...\n\nOct 17 00:00:04 ebpf-chaos systemd[1]: Starting Rotate log files...\n\nOct 17 00:00:04 ebpf-chaos systemd[1]: dpkg-db-backup.service: Deactivated\nsuccessfully.\n\nOct 17 00:00:04 ebpf-chaos systemd[1]: Finished Daily dpkg database backup\nservice.\n\nOct 17 00:00:04 ebpf-chaos systemd[1]: logrotate.service: Deactivated\nsuccessfully.\n\nOct 17 00:00:04 ebpf-chaos systemd[1]: Finished Rotate log files.\n\nOct 17 00:17:01 ebpf-chaos CRON[727495]: (root) CMD (   cd / && run-parts\n--report /etc/cron.hourly)\n\n```\n\n\nWe can create an algorithm to split each log line by whitespaces, and then\njoin the results again. Let's ask Code Suggestions for help. 
\n\n\n```python\n\n# Split log line \"Oct 17 00:00:04 ebpf-chaos systemd[1]: Finished Rotate log\nfiles.\" by whitespaces and save in a list\n\n\nlog_line = \"Oct 17 00:00:04 ebpf-chaos systemd[1]: Finished Rotate log\nfiles.\"\n\nlog_line_split = log_line.split(\" \")\n\nprint(log_line_split)\n\n```\n\n\nRun the script again to verify the result.\n\n\n```shell\n\npython3 log_reader.py\n\n\n['Oct', '17', '00:00:04', 'ebpf-chaos', 'systemd[1]:', 'Finished', 'Rotate',\n'log', 'files.']\n\n```\n\n\nThe first three items are part of the datetime string, followed by the host,\nservice, and remaining log message items. Let's practice string operations\nin Python as the next step. \n\n\n### String and data structure operations\n\n\nLet's ask Code Suggestions for help with learning to join strings, and\nperform list operations.\n\n\n1. Join the first three items with a whitespace again. \n\n2. Keep host and service. \n\n3. Join the remaining variable item count into a string, separated with\nwhitespaces, again. \n\n4. Store the identified column keys, and their respective values in a new\ndata structure:\n[dictionary](https://docs.python.org/3/library/stdtypes.html#mapping-types-dict). 
\n\n\n![Code suggestions for list items with string\noperations](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_code_suggestions_list_items_string_join_extract.png){:\n.shadow}\n\n\n```shell \n\npython3 log_reader.py\n\n\n# Array\n\n['Oct', '17', '00:00:04', 'ebpf-chaos', 'systemd[1]:', 'Finished', 'Rotate',\n'log', 'files.']\n\n\n# Dictionary \n\n{'datetime': 'Oct 17 00:00:04', 'host': 'ebpf-chaos', 'service':\n'systemd[1]:', 'message': ' ebpf-chaos systemd[1]: Finished Rotate log\nfiles.'}\n\n```\n\n\nA working suggestion can look like the following:\n\n\n```python\n\n# Initialize results dictionary with empty values for datetime, host,\nservice, message\n\n# Loop over log line split \n\n# Join the first three list items as date string\n\n# Item 4: host \n\n# Item 5: service\n\n# Join the remaining items into a string, separated with whitespaces \n\n# Print the results after the loop \n\n\nresults = {'datetime': '', 'host': '', 'service': '', 'message': ''}\n\n\nfor item in log_line_split:\n\n    if results['datetime'] == '':\n        results['datetime'] = ' '.join(log_line_split[0:3])\n\n    elif results['host'] == '':\n        results['host'] = log_line_split[3]\n\n    elif results['service'] == '':\n        results['service'] = log_line_split[4]\n\n    else:\n        results['message'] += ' ' + item\n\nprint(results)\n\n\n```\n\n\nThe suggested algorithm loops over all log line items, and applies the same\noperation for the first three items. `log_line_split[0:3]` extracts a slice\nof three items into a new list. Calling `join()` on a separator character\nand passing the array as an argument joins the items into a string. The\nalgorithm continues to check for not initialized values for host (Item 4)\nand service (Item 5)and concludes with the remaining list items appended\ninto the message string. 
To be honest, I would have used a slightly\ndifferent algorithm, but it is a great learning curve to see other\nalgorithms, and ways to implement them. Practice with different\ninstructions, and data structures, and continue printing the data sets. \n\n\n**Tip:** If you need to terminate a script early, you can use `sys.exit()`.\nThe remaining code will not be executed. \n\n\n```python\n\nimport sys \n\nsys.exit(1)\n\n```\n\n\nImagine doing these operations for different log formats, and message types\n– it can get complicated and error-prone very quickly. Maybe there is\nanother approach. \n\n\n### Parse log files using regular expressions\n\n\nThere are different syslog format RFCs – [RFC\n3164](https://datatracker.ietf.org/doc/html/rfc3164) is obsolete but still\nfound in the wild as default configuration (matching the pattern above),\nwhile [RFC 5424](https://datatracker.ietf.org/doc/html/rfc5424) is more\nmodern, including datetime with timezone information. Parsing this format\ncan be tricky, so let's ask Code Suggestions for advice. \n\n\nIn some cases, the suggestions include regular expressions. They might not\nmatch immediately, making the code more complex to debug, with trial and\nerrors. A good standalone resource to text and explain regular expressions\nis [regex101.com](https://regex101.com/).  \n\n\n**Tip:** You can skip diving deep into regular expressions using the\nfollowing code snippet as a quick cheat. The next step involves instructing\nCode Suggestions to use these log patterns, and help us extract all valuable\ncolumns. 
\n\n\n```python\n\n# Define the syslog log format regex in a dictionary\n\n# Add entries for RFC3164, RFC5424\n\nregex_log_pattern = {\n    'rfc3164': '([A-Z][a-z][a-z]\\s{1,2}\\d{1,2}\\s\\d{2}[:]\\d{2}[:]\\d{2})\\s([\\w][\\w\\d\\.@-]*)\\s(.*)$',\n    'rfc5424': '(?:(\\d{4}[-]\\d{2}[-]\\d{2}[T]\\d{2}[:]\\d{2}[:]\\d{2}(?:\\.\\d{1,6})?(?:[+-]\\d{2}[:]\\d{2}|Z)?)|-)\\s(?:([\\w][\\w\\d\\.@-]*)|-)\\s(.*)$;'\n}\n\n```\n\n\nWe know what the function should do, and its input parameters – the file\nname, and a log pattern to match. The log lines should be split by this\nregular expression, returning a key-value dictionary for each log line. The\nfunction should return a list of dictionaries. \n\n\n```python\n\n# Create a function that parses a log file\n\n# Input parameter: file path\n\n# Match log line against regex_log_pattern\n\n# Return the results as dictionary list: log line, pattern, extracted\ncolumns\n\n```\n\n\n![Code suggestion based on a multiline comment instruction to get a function\nthat parses a log file based on regex\npatterns](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_code_suggestions_log_format_regex_function_instructions_01.png){:\n.shadow}\n\n\nRemember the indent for opening a new scope? The same applies for functions\nin Python. The `def` identifier requires a function name, and a list of\nparameters, followed by an opening colon. The next lines of code require the\nindent. VS Code will help with live-linting wrong indent, before the script\nexecution fails, or the CI/CD pipelines. \n\n\nContinue with Code Suggestions – it might already know that you want to\nparse all log files, and parse them using the newly created function. 
\n\n\n![Code suggestion to parse all log files, and print the result\nset](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_code_suggestions_log_format_regex_function_instructions_02.png){:\n.shadow}\n\n\nA full working example can look like this: \n\n\n```\n\nimport os\n\n\n# Specify the path and file suffix in variables\n\npath = 'log-data/'\n\nfile_suffix = '.log'\n\n\n# Read all file paths in the directory into a list\n\nlog_files = []\n\nfor root, directories, files in os.walk(path):\n    for file in files:\n        if file.endswith(file_suffix):\n            log_files.append(os.path.join(root, file))\n\n# Define the syslog log format regex in a dictionary\n\n# Add entries for RFC3164, RFC5424\n\nregex_log_pattern = {\n    'rfc3164': '([A-Z][a-z][a-z]\\s{1,2}\\d{1,2}\\s\\d{2}[:]\\d{2}[:]\\d{2})\\s([\\w][\\w\\d\\.@-]*)\\s(.*)$',\n    'rfc5424': '(?:(\\d{4}[-]\\d{2}[-]\\d{2}[T]\\d{2}[:]\\d{2}[:]\\d{2}(?:\\.\\d{1,6})?(?:[+-]\\d{2}[:]\\d{2}|Z)?)|-)\\s(?:([\\w][\\w\\d\\.@-]*)|-)\\s(.*)$;'\n}\n\n\n# Create a function that parses a log file\n\n# Input parameter: file path\n\n# Match log line against regex_log_pattern\n\n# Return the results as dictionary list: log line, pattern name, extracted\ncolumns\n\nimport re\n\n\ndef parse_log_file(file_path):\n    # Read the log file\n    with open(file_path, 'r') as f:\n        log_lines = f.readlines()\n\n    # Create a list to store the results\n    results = []\n\n    # Iterate over the log lines\n    for log_line in log_lines:\n        # Match the log line against the regex pattern\n        for pattern_name, pattern in regex_log_pattern.items():\n            match = re.match(pattern, log_line)\n\n            # If the log line matches the pattern, add the results to the list\n            if match:\n                extracted_columns = match.groups()\n                results.append({\n                    'log_line': log_line,\n                    'pattern_name': 
pattern_name,\n                    'extracted_columns': extracted_columns,\n                    'source_file': file_path\n                })\n\n    # Return the results\n    return results\n\n# Parse all files and print results\n\nfor log_file in log_files:\n    results = parse_log_file(log_file)\n    print(results)\n```\n\n\nLet's unpack what the `parse_log_file()` function does:\n\n\n1. Opens the file from `file_path` parameter. \n\n2. Reads all lines into a new variable `log_lines`. \n\n3. Creates a results list to store all items. \n\n4. Iterates over the log lines. \n\n5. Matches against all regex patterns configured in regex_log_pattern. \n\n6. If a match is found, extracts the matching column values.\n\n7. Creates a results item, including the values for the keys `log_line`,\n`pattern_name`, `extracted_columns`, `source_file`. \n\n8. Appends the results item to the results list.\n\n9. Returns the results list. \n\n\nThere are different variations to this – especially for the returned result\ndata structure. For this specific case, log lines come as list already.\nAdding a dictionary object instead of a raw log line allows function callers\nto extract the desired information in the next step. Once a working example\nhas been implemented, you can refactor the code later, too. \n\n\n### Advanced log format: auth.log\n\n\nParsing the syslog on a Linux distribution might not unveil the necessary\ndata to analyze. On a virtual machine that exposes port 22 (SSH) to the\nworld, the authentication log is much more interesting – plenty of bots and\nmalicious actors testing default password combinations and often brute force\nattacks.\n\n\nThe following snippet from `/var/log/auth.log` on one of my private servers\nshows the authentication log format and the random attempts from bots using\ndifferent usernames, etc. 
\n\n\n```\n\nOct 15 00:00:19 ebpf-chaos sshd[3967944]: Failed password for invalid user\nubuntu from 93.254.246.194 port 48840 ssh2\n\nOct 15 00:00:20 ebpf-chaos sshd[3967916]: Failed password for root from\n180.101.88.227 port 44397 ssh2\n\nOct 15 00:00:21 ebpf-chaos sshd[3967944]: Received disconnect from\n93.254.246.194 port 48840:11: Bye Bye [preauth]\n\nOct 15 00:00:21 ebpf-chaos sshd[3967944]: Disconnected from invalid user\nubuntu 93.254.246.194 port 48840 [preauth]\n\nOct 15 00:00:24 ebpf-chaos sshd[3967916]: Failed password for root from\n180.101.88.227 port 44397 ssh2\n\nOct 15 00:00:25 ebpf-chaos sshd[3967916]: Received disconnect from\n180.101.88.227 port 44397:11:  [preauth]\n\nOct 15 00:00:25 ebpf-chaos sshd[3967916]: Disconnected from authenticating\nuser root 180.101.88.227 port 44397 [preauth]\n\nOct 15 00:00:25 ebpf-chaos sshd[3967916]: PAM 2 more authentication\nfailures; logname= uid=0 euid=0 tty=ssh ruser= rhost=180.101.88.227 \nuser=root\n\nOct 15 00:00:25 ebpf-chaos sshd[3967998]: Invalid user teamspeak from\n185.218.20.10 port 33436\n\n```\n\n\n**Tip for intrusion prevention:** Add a firewall setup, and use\n[fail2ban](https://en.wikipedia.org/wiki/Fail2ban) to block invalid auth\nlogins. \n\n\nThe next exercise is to extend the logic to understand the free form log\nmessage parts, for example `Failed password for invalid user ubuntu from\n93.254.246.194 port 48840 ssh2`. The task is to store the data in an\noptional dictionary with key value pairs. \n\n\nCreate a new function that takes the previously parsed log line results as\ninput, and specifically parses the last list item for each line.\n\n\n1. Count the number of `Failed password` and `Invalid user` messages.\n\n2. 
Return the results with count, log file, pattern \n\n\n![Code suggestions for a log file message parser to count auth.log\nfailures](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_code_suggestions_parse_log_message_auth_log.png){:\n.shadow}\n\n\nA working suggestion can look like the following code:\n\n\n```python\n\n# Create a function that parses a log file message from the last\nextracted_columns entry \n\n# Input: Parsed log lines results list \n\n# Loop over all log lines in the list, and extract the last list item as\nmessage \n\n# Count failure strings in the message: Failed password, Invalid user \n\n# Return the results if failure count greater 0: log_file, count, failure\nstring\n\ndef parse_log_file_message(results):\n    failure_results = []\n\n    # Iterate over the log lines\n    for result in results:\n        # Extract the message from the last list item\n        message = result['extracted_columns'][-1]\n\n        # Count the number of failure strings in the message\n        failure_count = message.count('Failed password') + message.count('Invalid user')\n\n        # If the failure count is greater than 0, add the results to the list\n        if failure_count > 0:\n            failure_results.append({\n                'log_file': result['source_file'],\n                'count': failure_count,\n                'failure_string': message\n            })\n\n    # Return the results\n    return failure_results\n\n# Parse all files and print results\n\nfor log_file in log_files:\n    results = parse_log_file(log_file)\n    failure_results = parse_log_file_message(results)\n    print(failure_results)\n```\n\n\nThe algorithm follows the previous implementations: First, create a results\narray to store matching data. Then, iterate over the already parsed\nlog_lines in the list. Each log line contains the `extracted_columns` key,\nwhich holds the free-form message string at the end. 
The next step is to\ncall the string object function `count()` to count how many times a given\ncharacter sequence is contained in a string. The returned numbers are added\nup to the `failure_count` variable. If it is greater than zero, the result\nis added to the results list, including the `log_file`, `count` and\n`failure_string` key-value pairs. After returning the parsed log message\nresults, loop through all log files, parse them, and print the results\nagain. \n\n\nExecute the script to inspect the detected matches. Note that the data\nstructure can be optimized in future learning steps.\n\n\n```\n\npython3 log_reader.py\n\n\n[{'log_file': 'log-data/var/log/auth.log', 'count': 1, 'failure_string':\n'sshd[3967944]: Failed password for invalid user ubuntu from 93.254.246.194\nport 48840 ssh2'}, {'log_file': 'log-data/var/log/auth.log', 'count': 1,\n'failure_string': 'sshd[3967916]: Failed password for root from\n180.101.88.227 port 44397 ssh2'}, {'log_file': 'log-data/var/log/auth.log',\n'count': 1, 'failure_string': 'sshd[3967916]: Failed password for root from\n180.101.88.227 port 44397 ssh2'}, {'log_file': 'log-data/var/log/auth.log',\n'count': 1, 'failure_string': 'sshd[3967998]: Invalid user teamspeak from\n185.218.20.10 port 33436'}, {'log_file': 'log-data/var/log/auth.log',\n'count': 1, 'failure_string': 'sshd[3967998]: Failed password for invalid\nuser teamspeak from 185.218.20.10 port 33436 ssh2'}, {'log_file':\n'log-data/var/log/auth.log', 'count': 1, 'failure_string': 'sshd[3968077]:\nInvalid user mcserver from 218.211.33.146 port 50950'}]\n\n\n```\n\n\n### Parsing more types: Structured logging\n\n\nApplication developers can use the structured logging format to help machine\nparsers to extract the key value pairs. 
Prometheus provides this information\nin the following structure in syslog:\n\n\n```\n\nOct 17 19:00:10 ebpf-chaos prometheus[594]: ts=2023-10-17T19:00:10.425Z\ncaller=compact.go:519 level=info component=tsdb m\n\nsg=\"write block\" mint=1697558404661 maxt=1697565600000\nulid=01HCZG4ZX51GTH8H7PVBYDF4N6 duration=148.675854ms\n\nOct 17 19:00:10 ebpf-chaos prometheus[594]: ts=2023-10-17T19:00:10.464Z\ncaller=head.go:1213 level=info component=tsdb msg\n\n=\"Head GC completed\" caller=truncateMemory duration=6.845245ms\n\nOct 17 19:00:10 ebpf-chaos prometheus[594]: ts=2023-10-17T19:00:10.467Z\ncaller=checkpoint.go:100 level=info component=tsd\n\nb msg=\"Creating checkpoint\" from_segment=2308 to_segment=2309\nmint=1697565600000\n\nOct 17 19:00:10 ebpf-chaos prometheus[594]: ts=2023-10-17T19:00:10.517Z\ncaller=head.go:1185 level=info component=tsdb msg\n\n=\"WAL checkpoint complete\" first=2308 last=2309 duration=50.052621ms\n\n```\n\n\nThis format is easier to parse for scripts, because the message part can be\nsplit by whitespaces, and the assignment character `=`. Strings that contain\nwhitespaces are guaranteed to be enclosed with quotes. The downside is that\nnot all programming language libraries provide ready-to-use structured\nlogging libraries, making it harder for developers to adopt this format. \n\n\nPractice following the previous example to parse the `auth.log` format with\nadditional information. 
Tell Code Suggestions that you are expecting\nstructured logging format with key-value pairs, and which returned data\nstructure would be great:\n\n\n```python\n\n# Create a function that parses a log file message from the last\nextracted_columns entry \n\n# Input: Parsed log lines results list \n\n# Loop over all log lines in the list, and extract the last list item as\nmessage \n\n# Parse structured logging key-value pairs into a dictionary\n\n# Return results: log_file, dictionary \n\n```\n\n\n![Code suggestions for parsing structured logging format in the log file\nmessage\npart](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_code_suggestions_propose_structured_logging_message_parser.png){:\n.shadow}\n\n\n### Printing results and formatting\n\n\nMany of the examples used the `print()` statement to print the content on\nthe terminal. Python objects in the standard library support text\nrepresentation, and for some types it makes more sense (string, numbers),\nothers cannot provide much details (functions, etc.). \n\n\nYou can also pretty-print almost any data structure (lists, sets,\ndictionaries) in Python. The JSON library can format data structures in a\nreadable format, and use a given spaces indent to draw the JSON structure on\nthe terminal. Note that we use the `import` statement here to bring\nlibraries into the current scope, and access their methods, for example\n`json.dumps`. \n\n\n```python\n\nimport json \n\nprint(json.dumps(structured_results, indent=4))\n\n```\n\n\n![Parsing log files into structured objects, example result after following\nthe\nexercises](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_terminal_parsing_logs_and_pretty_print_results.png)\n\n\nPractice with modifying the existing source code, and replace the code\nsnippets where appropriate. 
Alternatively, create a new function that\nimplements pretty printing.\n\n\n```python\n\n# Create a pretty print function with indent 4 \n\n```\n\n\n![Code suggestions for pretty-print\nfunction](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/vs_code_code_suggestions_pretty_print.png){:\n.shadow}\n\n\nThis idea works in a similar fashion with creating your own logger\nfunctions...but we have to stop learning and take a break. Before we\nconclude the first blog post in the learning series, let's ensure that CI/CD\nand dependencies are set up properly for future exercises and async\npractice. \n\n\n## Dependency management and continuous verification  \n\n\n### Pip and pyenv: Bringing structure into Python \n\n\nDependencies can be managed in the [`requirements.txt`\nfile](https://pip.pypa.io/en/stable/reference/requirements-file-format/),\nincluding optional version dependencies. Using `requirements.txt` file also\nhas the advantage of being the single source of truth for local development\nenvironments and running continuous builds with GitLab CI/CD. They can use\nthe same installation command:\n\n\n```shell\n\npip install -r requirements.txt\n\n```\n\n\nSome Linux distributions do not install the pip package manager by default,\nfor example, Ubuntu/Debian require to install the `python3-pip` package. \n\n\nYou can manage different virtual environments using\n[venv](https://docs.python.org/3/library/venv.html). This workflow can be\nbeneficial to install Python dependencies into the virtual environment,\ninstead of globally into the OS path which might break on upgrades. \n\n\n```shell\n\npip install virtualenv\n\nvirtualenv venv\n\nsource venv/bin/activate \n\n```\n\n\n### Automation: Configure CI/CD pipeline for Python\n\n\nThe [CI/CD pipeline](https://docs.gitlab.com/ee/ci/) should continuously\nlint, test, and build the code. 
You can mimic the steps from the local\ndevelopment, and add testing more environments and versions: \n\n\n1. Lint the source code and check for formatting errors. The example uses\n[Pyflakes](https://pypi.org/project/pyflakes/), a mature linter, and\n[Ruff](https://docs.astral.sh/ruff/ ), a fast linter written in Rust. \n\n2. Cache dependencies installed using the pip package manager, following the\ndocumentation for [Python caching in GitLab\nCI/CD](https://docs.gitlab.com/ee/ci/caching/#cache-python-dependencies).\nThis saves time and resources on repeated CI/CD pipeline runs.\n\n3. Use parallel matrix builds to test different Python versions, based on\nthe available container images on Docker Hub and their tags. \n\n\n```yaml\n\nstages:\n  - lint\n  - test\n\ndefault:\n  image: python:latest\n  cache:                      # Pip's cache doesn't store the python packages\n    paths:                    # https://pip.pypa.io/en/stable/topics/caching/\n      - .cache/pip\n  before_script:\n    - python -V               # Print out python version for debugging\n    - pip install virtualenv\n    - virtualenv venv\n    - source venv/bin/activate\n\nvariables:  # Change pip's cache directory to be inside the project\ndirectory since we can only cache local items.\n  PIP_CACHE_DIR: \"$CI_PROJECT_DIR/.cache/pip\"\n\n# lint template\n\n.lint-tmpl:\n  script:\n    - echo \"Linting Python version $VERSION\"\n  parallel:\n    matrix:\n      - VERSION: ['3.9', '3.10', '3.11', '3.12']   # https://hub.docker.com/_/python\n\n# Lint, using Pyflakes: https://pypi.org/project/pyflakes/ \n\nlint-pyflakes:\n  extends: [.lint-tmpl]\n  script:\n    - pip install -r requirements.txt\n    - find . 
-not -path './venv' -type f -name '*.py' -exec sh -c 'pyflakes {}' \\;\n\n# Lint, using Ruff (Rust): https://docs.astral.sh/ruff/ \n\nlint-ruff:\n  extends: [.lint-tmpl]\n  script:\n    - pip install -r requirements.txt\n    - ruff .\n```\n\n\n![GitLab CI/CD Python lint job view, part of matrix\nbuilds](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/gitlab_cicd_python_lint_job_log_view.png)\n\n\n## What is next \n\n\nFun fact: GitLab Duo Code Suggestions also helped writing this blog post in\nVS Code, knowing about the context. In the screenshot, I just wanted to add\na tip about [regex101](https://regex101.com/), and GitLab Duo already knew. \n\n\n![Writing the GitLab blog post in VS Code with support from GitLab Duo Code\nSuggestions](https://about.gitlab.com/images/blogimages/learn-python-with-ai-code-suggestions-getting-started/gitlab_duo_code_suggestions_helping_write_the_learning_python_ai_blog_post.png)\n\n\nIn an upcoming blog, we will look into advanced learning examples with more\npractical (log) filtering and parallel operations, how to fetch logs from\nAPI endpoints (CI/CD job logs for example), and more data analytics and\nobservability. Until then, here are a few recommendations for practicing\nasync.\n\n\n### Async learning exercises\n\n\n- Implement the missing `log_file_limit` variable check. \n\n- Print a summary of the results in Markdown, not only JSON format. \n\n- Extend the script to accept a search filter as environment variable.\nPrint/count only filtered results. \n\n- Extend the script to accept a date range. It might require parsing the\ndatetime column in a time object to compare the range. \n\n- Inspect a GitLab CI/CD pipeline job log, and download the raw format.\nExtend the log parser to parse this specific format, and print a summary. \n\n\n### Share your feedback\n\n\nWhich programming language are you learning or considering learning? 
Start a\nnew topic on our [community](/community/) forum or Discord and share your\nexperience.\n\n\nWhen you use [GitLab Duo](/gitlab-duo/) Code Suggestions, please share your\nthoughts and feedback [in the feedback\nissue](https://gitlab.com/gitlab-org/gitlab/-/issues/405152).\n",[9,719,888,803],{"slug":3005,"featured":6,"template":698},"learning-python-with-a-little-help-from-ai-code-suggestions","content:en-us:blog:learning-python-with-a-little-help-from-ai-code-suggestions.yml","Learning Python With A Little Help From Ai Code Suggestions","en-us/blog/learning-python-with-a-little-help-from-ai-code-suggestions.yml","en-us/blog/learning-python-with-a-little-help-from-ai-code-suggestions",{"_path":3011,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3012,"content":3018,"config":3023,"_id":3025,"_type":14,"title":3026,"_source":16,"_file":3027,"_stem":3028,"_extension":19},"/en-us/blog/mastering-gitlab-admin-tasks-with-gitlab-duo-chat",{"title":3013,"description":3014,"ogTitle":3013,"ogDescription":3014,"noIndex":6,"ogImage":3015,"ogUrl":3016,"ogSiteName":685,"ogType":686,"canonicalUrls":3016,"schema":3017},"Mastering GitLab admin tasks with GitLab Duo Chat","Learn how to use Chat to streamline administrative tasks on self-managed instances, improving efficiency and problem-solving capabilities.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749666405/Blog/Hero%20Images/GitLab_Duo_Blog_Hero_1800x945_r2_B__1_.png","https://about.gitlab.com/blog/mastering-gitlab-admin-tasks-with-gitlab-duo-chat","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Mastering GitLab admin tasks with GitLab Duo Chat\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"David O'Regan\"}],\n        \"datePublished\": \"2024-08-09\",\n      }",{"title":3013,"description":3014,"authors":3019,"heroImage":3015,"date":3020,"body":3021,"category":801,"tags":3022},[1399],"2024-08-09","As a 
GitLab administrator managing a self-hosted instance, you often face\ncomplex challenges that require innovative solutions. Enter [GitLab Duo\nChat](https://about.gitlab.com/gitlab-duo/) – your AI-powered assistant that\ncan significantly streamline your administrative tasks. In this article,\nwe'll explore how you can leverage GitLab Duo Chat to solve intricate\nproblems efficiently, using a real-world example of updating group\nmemberships across multiple groups.\n\n\n## The power of GitLab Duo Chat for admins\n\n\nGitLab Duo Chat is more than just conversational AI; it's a powerful tool\nthat can assist with complex administrative tasks. By providing\ncontext-aware suggestions and code snippets, Chat can help you navigate\nthrough GitLab's extensive feature set and underlying architecture.\n\n\n### Case study: Updating group memberships\n\n\nLet's dive into a scenario where an admin needs to add an administrator user\nto multiple [groups](https://docs.gitlab.com/ee/user/group/) – in this case,\n50,000 groups. This task, while conceptually simple, can be daunting due to\nits scale.\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/dBd957MK_DE?si=JYTzdRjVQHyB6rpl\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n## Step-by-step problem-solving with GitLab Duo Chat\n\n\n* **Initial approach:** Our first instinct might be to use GitLab's API, but\nwith 50,000 groups, this could put significant strain on the system.\n\n* **Pivoting to the Rails console:** Instead, we can use the Rails console\nfor a more efficient solution. 
Here's where Chat comes in handy.\n\n* **Crafting the solution:** Let's walk through the process of creating a\nRuby script to accomplish our task:\n\n\n``` ruby\n\n\ndef add_admin_to_groups(parent_group, admin_user)\n     unless parent_group.members.exists?(user_id: admin_user.id)\n       parent_group.add_owner(admin_user)\n       puts \"Added #{admin_user.name} to #{parent_group.name}\"\n     end\n\n    parent_group.descendants.each do |subgroup|\n      add_admin_to_groups(subgroup, admin_user)\n    end\n  end\n\n# Usage\n\n\nadmin_user = User.find_by(username: 'mr_admin')\n\nparent_group = Group.find_by(path: 'top_group')\n\nadd_admin_to_groups(parent_group, admin_user)\n\n```\n\n\n* **Iterative refinement:** Through conversation with Chat, we refined this\nscript, addressing issues like method availability and parameter\nrequirements.\n\n\n### Key takeaways\n\n\n#### Non-deterministic responses\n\n\nGitLab Duo Chat may provide different answers to the same question,\nencouraging exploration of multiple solutions. As an example:\n\n\nAn admin asks, ***How can I improve CI/CD pipeline performance?***\n\n\nChat's first response could be:\n\n***Consider using GitLab CI/CD caching to speed up subsequent pipeline runs.\nHere's an example .gitlab-ci.yml snippet:***\n\n\n```\n\ncache:\n  key: ${CI_COMMIT_REF_SLUG}\n  paths:\n    - node_modules/    \n ```\n\n***This caches the `node_modules` directory, reducing build times for\nNode.js projects.***\n\n\nWhen asked again, Chat's second response could be: ***To improve CI/CD\nperformance, try parallelizing your jobs. Here's an example:***\n\n\n```\n\ntest:\n  parallel: 3\n```\n\n\n***This runs your test job across three parallel runners, potentially\nreducing overall pipeline duration.***\n\n\n### Error interpretation and debugging\n\n\nChat can help interpret error messages and suggest fixes. As an example, an\nadmin encounters an error: \"fatal: reference is not a tree: 8a7d460\". 
They\ncould ask Chat: ***I'm seeing this Git error. What does it mean and how can\nI fix it?***\n\n\nChat could respond with:\n\n\n***This error typically occurs when Git can't find a specific commit. It\ncould be due to a corrupted repository or incomplete fetch. Try these\nsteps:***\n\n* ***Run `git fsck` to check for corruption.***\n\n* ***If that doesn't work, try git fetch --all --prune to update all\nremotes.***\n\n* ***If the issue persists, you might need to re-clone the repository.***\n\n\n***Always back up your repository before attempting these fixes.***\n\n\n### Code optimization and refactoring\n\n\nChat can assist in optimizing and refactoring code for better performance.\n\n\nAn example: An admin wants to optimize a Gitaly configuration for a large\ninstance. They could ask Chat: ***How can I optimize Gitaly for an instance\nwith 10,000+ repositories?***\n\n\nChat could respond with: ***For a large instance, consider this optimized\nGitaly configuration:***\n\n\n```\n\n[concurrency]\n\nrpc_concurrency = 100\n\n\n[rate_limiting]\n\nrpc_rate_limit = 100\n\n\n[cgroups]\n\nmountpoint = '/sys/fs/cgroup'\n\nhierarchy_root = 'gitaly'\n\nmemory_bytes = 30000000000  # Adjust based on available memory\n\ncpu_shares = 1024\n\n```\n\n***This configuration increases concurrency, implements rate limiting to\nprevent overload, and uses cgroups (control groups) to manage resource\nallocation. Adjust values based on your specific hardware and needs.***\n\n\n## Best practices for using GitLab Duo Chat in admin tasks\n\n\n### Formulate queries clearly and provide context\n\n\nWhen using Chat, the quality of the response largely depends on the quality\nof your query. Clear, detailed questions that provide relevant context will\nyield more accurate and helpful answers.\n\n\nPoor query: ***Why is GitLab slow?***\n\n\nThis query lacks specifics and context, making it difficult for Chat to\nprovide a targeted response. 
\n\n\nA better query would be: ***Our GitLab instance with 5,000 users and 3,000\nprojects is experiencing slow response times, especially during peak hours\n(9-11 AM EST). CPU usage on the application servers spikes to 90%. How can\nwe diagnose and address this?***\n\n\nThis improved query provides crucial details:\n\n\n* scale of the instance (5,000 users, 3,000 projects)\n\n* nature of the problem (slow response times)\n\n* timing of the issue (peak hours, 9-11 AM EST)\n\n* observed symptoms (90% CPU spike)\n\n\nWith this information, Chat can provide more targeted advice.\n\n\nAn even better query would be: ***We're running GitLab 15.8.3 on a 3-node\ncluster (8 vCPUs, 32GB RAM each) with a separate PostgreSQL 13 database and\nRedis 6.2 instance. Our instance hosts 5,000 users and 3,000 projects. We're\nexperiencing slow response times (average 5s, up from our usual 1s) during\npeak hours (9-11 AM EST), primarily affecting merge request creation and\npipeline initiation. CPU usage on the application servers spikes to 90%,\nwhile database CPU remains under 60%. Gitaly CPU usage is around 70%. We've\nalready increased Puma workers to 8 per node. What additional diagnostics\nshould we run and what potential solutions should we consider?***\n\n\nThis query provides an extensive context, including:\n\n* GitLab version and infrastructure details\n\nspecific performance metrics (response time increase)\n\n* affected operations (merge requests, pipelines)\n\n* resource usage across different components\n\n* steps already taken to address the issue\n\n\nBy providing this level of detail, you enable Chat to:\n\n* understand the full scope of your environment\n\n* identify potential bottlenecks more accurately\n\n* suggest relevant diagnostic steps\n\n* propose solutions tailored to your specific setup\n\n\nAvoid recommending steps you've already taken.\n\n\nRemember, while GitLab Duo Chat is powerful, it's not omniscient. 
The more\nrelevant information you provide, the better it can assist you. By following\nthese guidelines, you'll get the most out of your interactions with Chat,\nleading to more effective problem-solving and administration of your GitLab\ninstance.\n\n\n### Use GitLab Duo Chat's suggestions as a starting point and refine\nincrementally\n\n\nChat is an excellent tool for getting started with complex tasks, but it's\nmost effective when used as part of an iterative process. Begin with a broad\nquestion, then use Chat's responses to guide your follow-up questions,\ngradually refining your understanding and solution.\n\n\n#### Initial query\n\n\nAdmin: ***How can I set up Geo replication for disaster recovery?***\n\n\nChat might respond with a basic setup guide, covering:\n\n- prerequisites for Geo setup\n\n- steps to configure the primary node\n\n- process for adding a secondary node\n\n- initial replication process\n\n\nThis provides a foundation, but complex setups like Geo often require more\nnuanced understanding. Here's how you might refine your queries:\n\n\n**- Follow-up Query 1**\n\n\nAdmin: ***How do I handle custom data in Geo replication?***\n\nThis question addresses a specific concern not covered in the initial\nsetup. \n\n\n**- Follow-up Query 2**\n\n\nAdmin: ***What's the best way to test failover without disrupting\nproduction?***\n\n\nThis query focuses on a critical operational concern. \n\n\n**- Follow-up Query 3**\n\n\nAdmin: ***Can you help me create a runbook for Geo failover?***\n\n\nThis final query aims to consolidate the gathered information into a\npractical guide. The benefits of this incremental approach:\n\n\n1. By breaking down the complex topic of Geo replication into smaller,\nfocused queries, you gain a more thorough understanding of the subject.\n\n2. Each follow-up question allows you to address specific concerns relevant\nto your environment, resulting in a more customized solution.\n\n3. 
The progression from setup to testing to creating a runbook ensures that\nyou're not just understanding the theory, but also preparing for real-world\nimplementation.\n\n4. The step-by-step process of refining your queries helps in better\nretention of the information, as you're actively engaging with the content.\n\n5. Follow-up questions often reveal aspects of the task you might not have\ninitially considered, leading to a more robust final solution.\n\n\n#### Best practices for incremental refinement\n\n\n- Start with broad questions to establish a foundation.\n\n- Use Chat's responses to identify areas that need more clarification or\ndetail.\n\n- Ask about specific scenarios or edge cases relevant to your environment.\n\n- Seek practical advice on implementation, testing, and maintenance.\n\n- Use your growing understanding to formulate increasingly specific and\ncomplex queries.\n\n- Periodically summarize your understanding and ask Chat to validate or\ncorrect it.\n\n\nRemember, while GitLab Duo Chat is a powerful tool, it's most effective when\ncombined with your own critical thinking and domain knowledge. Use it to\nguide your learning and decision-making process, but always validate\nimportant information and test solutions in a safe environment before\napplying them to production systems.\n\n\n### Potential applications for self-managed GitLab instances\n\n\n#### User management at scale\n\n\nAdmin: ***We need to onboard 1,000 new users from an acquired company. The\ndata is in a CSV file. How can we automate this process? I am an Admin, can\nI use a Python script for this?***\n\n\nChat: ***You can use GitLab's API for bulk user creation. 
Here's a Python\nscript to get you started:***\n\n\n```python\n\nimport requests\n\nimport csv\n\n\ndef create_user(username, email, name, password):\n    url = 'https://gitlab.example.com/api/v4/users'\n    headers = {'PRIVATE-TOKEN': 'your_access_token'}\n    data = {\n        'username': username,\n        'email': email,\n        'name': name,\n        'password': password,\n        'skip_confirmation': True\n    }\n    response = requests.post(url, headers=headers, data=data)\n    return response.status_code == 201\n\nwith open('new_users.csv', 'r') as f:\n    reader = csv.reader(f)\n    for row in reader:\n        username, email, name, password = row\n        if create_user(username, email, name, password):\n            print(f\"Created user: {username}\")\n        else:\n            print(f\"Failed to create user: {username}\")\n```\n\n***This script reads user data from a CSV file and creates users via the\nGitLab API. Remember to handle errors and rate limiting for large\nbatches.***\n\n\n#### Project and group administration\n\n\nAdmin: ***We need to implement a standardized project structure across our\norganization. How can we automate this? Assume I am an admin. Write me a\nscript to do this via the API.***\n\n\nChat: ***You can use GitLab's API to create a project template and then\napply it to new projects. 
Here's a bash script to create a new project with\na standard structure:***\n\n\n```\n\n#!/bin/bash\n\n\nTOKEN=\"your_access_token\"\n\nGROUP_ID=\"your_group_id\"\n\n\ncreate_project() {\n  curl --header \"PRIVATE-TOKEN: $TOKEN\" \\\n  --data \"name=$1&namespace_id=$GROUP_ID\" \\\n  \"https://gitlab.example.com/api/v4/projects\"\n}\n\n\nadd_file() {\n  curl --request POST --header \"PRIVATE-TOKEN: $TOKEN\" \\\n  --form \"branch=main\" \\\n  --form \"content=$2\" \\\n  --form \"commit_message=Add $1\" \\\n  \"https://gitlab.example.com/api/v4/projects/$3/repository/files/$1\"\n}\n\n\nPROJECT_ID=$(create_project \"New-Standardized-Project\" | jq '.id')\n\n\nadd_file \"README.md\" \"# Welcome to our standardized project\" $PROJECT_ID\n\nadd_file \".gitlab-ci.yml\" \"stages:\\n  - build\\n  - test\\n  - deploy\"\n$PROJECT_ID\n\nadd_file \"CONTRIBUTING.md\" \"## How to contribute to this project\"\n$PROJECT_ID\n\n\necho \"Project created with ID: $PROJECT_ID\"\n\n```\n\n\n***This script creates a new project with a README, a basic CI/CD\nconfiguration, and a CONTRIBUTING guide. You can expand this to include more\nfiles or even clone a template repository.***\n\n\n#### Performance optimization\n\n\nAdmin: ***Our merge request creation process is slow. How can we diagnose\nand improve this? Assume I am an admin.***\n\n\nChat: ***As an instance admin, there are a few potential areas you can\ninvestigate to diagnose and improve the merge request creation process:\n\nDatabase Performance: The provided quotes show some example SQL queries\nrelated to merge request metrics. Slow database queries can significantly\nimpact merge request creation performance. You should review the database\nperformance, indexes, and query plans for any queries involved in the merge\nrequest creation process.***\n\n\n- ***Background jobs: Merge request creation may involve background jobs\n(e.g., for data processing, notifications, etc.). 
If these jobs are backed\nup or running slowly, it can impact the overall merge request creation time.\nCheck the background job queues and workers.***\n\n- ***Code optimization: Analyze the code responsible for merge request\ncreation to identify any potential bottlenecks or inefficiencies. This could\ninclude things like unnecessary database queries, inefficient algorithms, or\nlack of caching.***\n\n- ***Resource utilization: Monitor the resource utilization (CPU, memory,\ndisk I/O) of the GitLab application servers during merge request creation.\nHigh resource utilization can lead to performance degradation.***\n\n- ***Network performance: If your GitLab instance is distributed across\nmultiple nodes or servers, network performance between these components can\nimpact merge request creation times. Check for any network bottlenecks or\nlatency issues.***\n\n- ***Caching: Ensure that appropriate caching mechanisms are in place and\nfunctioning correctly. Inefficient or incorrect caching can lead to\nunnecessary overhead during merge request creation.***\n\n- ***Load testing: Perform load testing to simulate real-world scenarios and\nidentify any performance bottlenecks or scalability issues that may arise\nunder high load.***\n\n- ***Logging and monitoring: Enable detailed logging and monitoring for the\nmerge request creation process. This can help identify specific areas of\nslowness or bottlenecks.***\n\n\n***To effectively diagnose and improve the merge request creation process,\nyou'll likely need to combine multiple techniques, such as profiling, code\nanalysis, database optimization, and load testing. Additionally, consulting\nthe GitLab documentation and community resources can provide valuable\ninsights and best practices specific to your GitLab version and\nconfiguration.***\n\n\n#### Troubleshooting complex issues\n\n\nAdmin: ***We're experiencing intermittent 502 errors on our GitLab instance.\nHow can we troubleshoot this? 
I am an instance admin, how can I check the\nlogs to find answers?***\n\n\nChat: ***To troubleshoot intermittent 502 errors on your GitLab instance as\nan admin, you can follow these steps:***\n\n\n***1. Check the main GitLab logs while reproducing the issue by running:***\n\n\n```\n\nsudo gitlab-ctl tail | tee /tmp/\u003Ccase-ID-and-keywords>.log\n\n```\n\n\n***2. Press Ctrl+C to stop tailing the logs.***\n\n\n***3. Run GitLabSOS to gather broader diagnostics about your instance:***\n\n\n```\n\nsudo /opt/gitlab/embedded/bin/gitlab-sos\n\n```\n\n\n***4. Use the fast-stats tool to analyze performance statistics from the\nlogs:***\n\n\n```\n\nsudo /opt/gitlab/embedded/bin/fast-stats\n/var/log/gitlab/gitlab-rails/production.log\n\n```\n\n\n***5.Look for relevant log entries using the correlation ID that is present\nin most requests.***\n\n\n***6. Check the Pages logs (gitlab_pages_error.log) for any specific errors\nrelated to Pages communication issues with the GitLab API or loopback\ntimeouts if using an AWS Network Load Balancer.***\n\n\n***7. Ensure your gitlab-secrets.json file is synced across all GitLab Pages\ninstances if using `domain_config_source=auto`.***\n\n\n***8. If using systemd, configure GitLab Pages to listen on an explicit IP\nand port to avoid /tmp being cleaned up.***\n\n\n## Get started with GitLab Duo Chat\n\n\nGitLab Duo Chat is a powerful ally for administrators of self-managed GitLab\ninstances. 
By leveraging its capabilities, you can tackle complex tasks more\nefficiently, learn new techniques, and ultimately become a more effective\nGitLab administrator.\n\n\nWe encourage you to experiment with Chat in your administrative workflows.\nRemember to use it responsibly and always verify the solutions it provides.\n\n\n> [Try GitLab Duo for free](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/).\n\n\n### Resources\n\n- [GitLab Duo documentation](https://docs.gitlab.com/ee/user/gitlab_duo/)\n\n- [GitLab Rails Console Cheat\nSheet](https://docs.gitlab.com/ee/administration/operations/rails_console.html)\n\n- [GitLab API documentation](https://docs.gitlab.com/ee/api/)\n\n- [10 best practices for using AI-powered GitLab Duo\nChat](https://about.gitlab.com/blog/10-best-practices-for-using-ai-powered-gitlab-duo-chat/)\n\n- [GitLab Duo Chat 101: Get more done on GitLab with our AI\nassistant](https://about.gitlab.com/blog/gitlab-duo-chat-101-get-more-done-on-gitlab-with-our-ai-assistant/)\n",[803,719,9,695,717],{"slug":3024,"featured":91,"template":698},"mastering-gitlab-admin-tasks-with-gitlab-duo-chat","content:en-us:blog:mastering-gitlab-admin-tasks-with-gitlab-duo-chat.yml","Mastering Gitlab Admin Tasks With Gitlab Duo Chat","en-us/blog/mastering-gitlab-admin-tasks-with-gitlab-duo-chat.yml","en-us/blog/mastering-gitlab-admin-tasks-with-gitlab-duo-chat",{"_path":3030,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3031,"content":3037,"config":3043,"_id":3045,"_type":14,"title":3046,"_source":16,"_file":3047,"_stem":3048,"_extension":19},"/en-us/blog/measuring-ai-effectiveness-beyond-developer-productivity-metrics",{"title":3032,"description":3033,"ogTitle":3032,"ogDescription":3033,"noIndex":6,"ogImage":3034,"ogUrl":3035,"ogSiteName":685,"ogType":686,"canonicalUrls":3035,"schema":3036},"Measuring AI effectiveness beyond developer productivity metrics ","AI assistants are here, yet measuring AI's impact on productivity isn’t figured out. 
Here’s why it’s a difficult problem and how GitLab is solving it.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749671994/Blog/Hero%20Images/AdobeStock_700757695.jpg","https://about.gitlab.com/blog/measuring-ai-effectiveness-beyond-developer-productivity-metrics","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Measuring AI effectiveness beyond developer productivity metrics \",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Taylor McCaslin\"}],\n        \"datePublished\": \"2024-02-20\",\n      }",{"title":3032,"description":3033,"authors":3038,"heroImage":3034,"date":3040,"body":3041,"category":801,"tags":3042},[3039],"Taylor McCaslin","2024-02-20","AI-powered productivity tools promise to boost productivity by automating repetitive coding and tedious tasks, as well as generating code. How organizations measure the AI impact of these productivity tools has yet to be truly figured out. GitLab is working on a solution: AI Impact is a dashboard grounded in [value stream analytics](https://about.gitlab.com/solutions/value-stream-management/) that will help organizations understand the effect of [GitLab Duo](https://about.gitlab.com/gitlab-duo/), our AI-powered suite of features, on their productivity. AI Impact is the culmination of what we’ve learned at GitLab about measuring the impact of AI, and we wanted to share those lessons with you.\n\n[A report for The Pragmatic Engineer](https://newsletter.getdx.com/p/developer-productivity-metrics-at-top-companies) shows that measuring productivity in general isn’t straightforward, with top engineering teams around the globe all using different metrics. If everyone has a different productivity metric to optimize, how do we even begin to measure the impact of AI productivity tools? 
Welcome to why measuring AI assistant productivity impact is difficult and commonly misses the mark.\n\n>  Follow the progress of our AI Impact dashboard and [share your feedback](https://gitlab.com/groups/gitlab-org/-/epics/12978).\n\n## Flawed productivity metrics\n\nSimplistic productivity metrics like lines of code contributed per day or acceptance rates of AI suggestions fail to capture downstream costs. For instance, GitClear, according to [an Infoworld article](https://www.infoworld.com/article/3712685/is-ai-making-our-code-stupid.html), “analyzed 153 million lines of changed code between January 2020 and December 2023 and now expects that code churn ('the percentage of lines that are reverted or updated less than two weeks after being authored') will double in 2024.\" Thus, simply measuring lines of code risks technical debt pileup and skill atrophy in developers.  \n\n## Indirect impacts are hard to quantify\n\nThe goal of AI developer tools is to remove toil, allowing developers to focus on higher value tasks like system architecture and design. But how much time is really saved this way versus spent reviewing, testing, and maintaining AI-generated code? These second-order productivity impacts are very difficult to accurately attribute directly to AI, which may give you a false sense of value. One solution to this is to choose who gets to use AI productivity tools carefully.\n\n## Focus should be on business outcomes\n\nUltimately, what matters is real-world business outcomes, not developer activity metrics. Tracking lead time, cycle time, production defects, and user satisfaction better indicate where bottlenecks exist. If AI tools generate usable code faster, and quality teams can’t keep up with changes, the end software product may decrease in quality and lead to customer satisfaction problems. Shipping more sounds great until it causes problems that take even more time, money, and effort to resolve. 
Measuring business outcomes is also difficult and these measurements frequently are lagging indicators of problems. Measuring quality defects, security issues, and application performance are all ways to identify business impact sooner. \n\n## The need to balance speed and quality\n\nWhile AI code generation has the potential to accelerate development velocity, it should not come at the cost of overall quality and maintainability. Teams must strike the right balance between velocity and writing maintainable, well-tested code that solves actual business problems. Quality should not be sacrificed purely to maximize productivity metrics. This is when measuring lines of code AI generates or number of AI suggestions developers accept can optimize for the problematic outcomes. More code doesn't necessarily mean higher quality or productivity. More code means more to review, test, and maintain – potentially slowing delivery down.\n\nLet’s look at an example: AI-generated code output is scoped to the area a developer is currently working on. Current AI tools lack the ability to assess the broader architecture of the application (amplified in a microservices architecture). This means that even if the quality of the generated code is good, it may lead to repetition and code bloat because it will be inserted into the area targeted rather than making wider systematic changes. This is problematic in languages that are architected with object-oriented languages that use DRY (don't-repeat-yourself) principles. This is an active area of research and we’re excited to adopt new approaches and technologies to increase the context awareness of our AI features.\n\nAcceptance rate can be particularly misleading, and unfortunately is becoming the primary way AI productivity tools measure success. Developers may accept an AI-generated suggestion but then need to heavily edit or rewrite it. Thus, the initial acceptance gives no indication of whether the suggestion was actually useful. 
Acceptance rate is fundamentally a proxy for AI assistant quality, yet it is misconstrued as a productivity measure. This is especially misleading when all vendors are measuring acceptance rate differently and marketing based on this number. GitLab intentionally does not use this kind of data in our marketing. What we’ve seen in practice is that developers use AI-generated code similar to how an actor uses a cue – they look at the generated code and say, \"oh, right, that's the nudge I needed, I'll take it from here.\" \n\n## Implementation and team dynamics play a key role\n\nHow productivity gains materialize depends on how AI tools are implemented and developer dynamics. If some developers distrust the technology or reviews become lax expecting AI to catch errors, quality may suffer. Additionally, introducing AI tools often necessitates changes to processes like code reviews, testing, and documentation. Productivity could temporarily decline as teams adjust to new workflows before seeing gains. Organizations must ensure that when implementing AI tools, that they allow teams time to figure out how it works and how it fits into their workflows, knowing that this trial-and-error period may lead to reduced productivity metrics before seeing productivity gains. \n\nTo get this balance right, it’s important to define the tasks that are highly accurate and consistent and train the team to use AI for those use cases (at least, at first). We know that AI code generation is useful for producing scaffolding, test generation, and syntax corrections, as well as generating documentation. Have teams start there and they will see better results and learn to use the tool more effectively. Remember you can’t measure AI’s impact in a week. You have to give teams time to find their rhythm with their AI assistants. 
\n\n## Challenges exist, but AI is the future\n\nNow that we’ve talked about the challenges of measuring AI impact and potential risks, we do want to say at GitLab we do believe AI has a huge role to play in the evolution of DevSecOps platforms. That’s why we’re building GitLab Duo. But we are not rushing into productivity measurement by showing acceptance rates, or lines of code generated. We believe these are a step backwards to previous ways of thinking about productivity. Instead we’re looking at the data we have within our unified DevSecOps platform to present a more complete picture of AI Impact.  \n\n## What to measure instead\n\nMeasuring the productivity impacts of AI developer tools requires nuance and a focus on end-to-end outcomes rather than isolated productivity metrics. For these reasons, simple quantitative metrics tend to miss the nuances of measuring productivity with AI developer tools. The key is to combine quantitative data from across the software development lifecycle (SDLC) with qualitative feedback from developers on how AI actually impacts their day-to-day experience and shapes long-term development practices. Only then can we get an accurate picture of the productivity gains these tools can offer. We view AI as an augmentor to DevSecOps adoption, rather than a replacement for doing things the right way. Organizations focusing on building the right muscles in their SDLC practice are the ones best positioned to actually take advantage of any potential gains in developer coding productivity.\n\nSo what metric should we use instead? At GitLab we already have [value stream analytics](https://about.gitlab.com/solutions/value-stream-management/), which examine the end-to-end flow of work from idea to production to determine where bottlenecks exist. Value stream analytics isn’t a single measurement, it’s the ongoing tracking of metrics like lead time, cycle time, deployment frequency, and production defects. 
This keeps the focus on business outcomes rather than developer activity. By taking a holistic view across code quality, collaboration, downstream costs, and developer experience, teams can steer these technologies to augment (rather than replace) human abilities over the long run. \n\n## Introducing GitLab's AI Impact approach\n\nGitLab has the whole picture being a unified DevSecOps platform that spans the entire SDLC. We built [Value Stream Management](https://about.gitlab.com/solutions/value-stream-management/) to empower teams with metrics and insights to ship better software faster. Blending GitLab [Value Stream Analytics](https://about.gitlab.com/solutions/value-stream-management/) and [DORA metrics](https://about.gitlab.com/solutions/value-stream-management/dora/), and GitLab Duo usage data, we can provide organizations with the complete picture of how AI is impacting their SDLC. We’re calling this dashboard AI Impact, and it’s coming in an upcoming release to measure GitLab Duo’s impact on productivity. Follow our progress and [share your feedback](https://gitlab.com/groups/gitlab-org/-/epics/12978). \n\n_Disclaimer: This blog contains information related to upcoming products, features, and functionality. It is important to note that the information in this blog post is for informational purposes only. Please do not rely on this information for purchasing or planning purposes. As with all projects, the items mentioned in this blog and linked pages are subject to change or delay. 
The development, release, and timing of any products, features, or functionality remain at the sole discretion of GitLab._",[803,9,951,717],{"slug":3044,"featured":91,"template":698},"measuring-ai-effectiveness-beyond-developer-productivity-metrics","content:en-us:blog:measuring-ai-effectiveness-beyond-developer-productivity-metrics.yml","Measuring Ai Effectiveness Beyond Developer Productivity Metrics","en-us/blog/measuring-ai-effectiveness-beyond-developer-productivity-metrics.yml","en-us/blog/measuring-ai-effectiveness-beyond-developer-productivity-metrics",{"_path":3050,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3051,"content":3056,"config":3061,"_id":3063,"_type":14,"title":3064,"_source":16,"_file":3065,"_stem":3066,"_extension":19},"/en-us/blog/meet-gitlab-duo-workflow-the-future-of-ai-driven-development",{"title":3052,"description":3053,"ogTitle":3052,"ogDescription":3053,"noIndex":6,"ogImage":817,"ogUrl":3054,"ogSiteName":685,"ogType":686,"canonicalUrls":3054,"schema":3055},"Meet GitLab Duo Workflow - the future of AI-driven development","Workflow, our autonomous AI agent, will transform the way teams build and ship software – our first bold step towards AI-driven DevSecOps.","https://about.gitlab.com/blog/meet-gitlab-duo-workflow-the-future-of-ai-driven-development","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Meet GitLab Duo Workflow - the future of AI-driven development\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"David DeSanto, Chief Product Officer, GitLab\"}],\n        \"datePublished\": \"2024-06-27\",\n      }",{"title":3052,"description":3053,"authors":3057,"heroImage":817,"date":3058,"body":3059,"category":801,"tags":3060},[1869],"2024-06-27","Imagine if software wrote itself. 
It seems like a distant future, but with ongoing advancements in large language models and GitLab’s unified AI-powered DevSecOps platform, that future is quickly coming into focus. During our [GitLab 17 launch event](https://about.gitlab.com/seventeen/), we announced GitLab Duo Workflow, an autonomous AI agent that will transform how teams build, secure, deploy, and monitor software.\n\nGitLab Duo Workflow moves beyond the current landscape of reactive, prompt-based AI assistants by creating an autonomous team member actively contributing to optimize every aspect of your software development lifecycle. Workflow distinguishes itself by leveraging GitLab’s unified data store, which seamlessly connects all relevant data, projects, repositories, and documentation. This allows Workflow to be an intelligent, always-on agent that constantly monitors your projects, anticipates potential production issues, automatically identifies and resolves vulnerabilities, optimizes your applications for peak performance, and streamlines onboarding by rapidly building customized remote development environments.\n\nAI is transforming how secure software is created, maintained, updated, deployed, and monitored, enabling organizations to ship more software than ever before. GitLab Duo Workflow represents our first bold step towards AI-driven DevSecOps. We aim to empower developers to focus on high-level problem-solving, innovation, and value creation, while [GitLab Duo](https://about.gitlab.com/gitlab-duo/) handles repetitive tasks and optimization behind the scenes.\n\n## The vision for GitLab Duo Workflow\nWith GitLab Duo Workflow, we are laser-focused on tackling several key use cases to automate and optimize the software development process from end to end.\n### 1. Development automated\n\nStraight from the IDE, GitLab Duo Workflow helps plan and prioritize tasks tailored to individual projects and defined organizational processes. 
Using the requirements from a particular work item (whether an epic, issue, or task), Workflow produces an implementation plan that developers can review and refine. Then, Workflow works through the plan, generating or rewriting code to accomplish and meet the defined requirements. Workflow accomplishes this by operating within a [GitLab Remote Development workspace](https://about.gitlab.com/blog/quick-start-guide-for-gitlab-workspaces/), allowing it to safely and securely evaluate, write, and test code changes. This also ensures that Workflow produces code that both meets the requirements as well as passes all CI pipeline tests, including security scans. If the pipeline fails, Workflow automatically addresses issues and troubleshoots as needed, ensuring only high-quality code that meets your organization's standards is created and committed to your projects.\n\nOnce ready, Workflow will automatically create a merge request outlining its code changes and go through your merge request approval processes, including engaging with the code reviewer or maintainer. You can even ask Workflow to review your code and have it comment on your merge requests just as a human code reviewer would today. Even better, Workflow will implement those suggestions for you if you'd like it to. And this is only the start.\n\n### 2. Intelligent continuous improvement\nGitLab Duo Workflow will analyze your codebase in real time, suggesting architectural optimizations for enhanced efficiency, performance, and cost savings. Furthermore, it will proactively identify opportunities for code refactoring to improve scalability and address technical debt by suggesting changes to developers or automatically implementing them in a sandbox environment. Additionally, Workflow will dynamically manage cloud resources to prevent overprovisioning and ensure your applications always meet their performance targets.\n\n### 3. 
Proactive security and compliance\nSecurity and compliance are top priorities for any organization. GitLab Duo Workflow will automatically ask developers to apply patches, refactor insecure code, and adapt to emerging threats in real time. Moreover, Workflow will continuously assess security risks associated with your applications and production environments and assist you with implementing mitigating controls.\n\n### 4. Self-optimizing performance\nGitLab Duo Workflow will incorporate sophisticated feedback loops for continuous learning and improvement. By analyzing data from monitoring tools, user interactions, and business outcomes, it will continuously refine its view of your codebase to ensure that your application architectures are always aligned with your business needs. As we see with all AI, Workflow will constantly improve, catching and fixing its own mistakes as it learns to be a partner in your organization.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://player.vimeo.com/video/967982166?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allowfullscreen=\"true\" title=\"GitLab Duo Workflow the future of AI-driven DevSecOps\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## The future of AI is now\nGitLab Duo Workflow represents an exciting leap forward, transitioning from AI that requires constant human prompting to AI that drives development workflows and processes, seeking human guidance when needed. With GitLab's unified, AI-driven interface spanning the DevSecOps lifecycle, organizations will be able to create a new generation of AI-powered applications with unparalleled speed, efficiency, and innovation, all while maintaining the highest standards of security and compliance. No tradeoffs.\n\nStay tuned for more updates and insights as we continue to push the boundaries of what's possible with AI in software development. 
Together, let's embrace the future of AI-driven DevSecOps and unlock the full potential of your teams and organizations.\n\n> If you are curious about AI-driven DevSecOps and want to be part of this journey, including access to the pre-release program, please [sign up for our GitLab Duo Workflow waitlist](https://forms.gle/5ppRuNVb8LwSPNVJA).",[803,9,951,1133,888],{"slug":3062,"featured":91,"template":698},"meet-gitlab-duo-workflow-the-future-of-ai-driven-development","content:en-us:blog:meet-gitlab-duo-workflow-the-future-of-ai-driven-development.yml","Meet Gitlab Duo Workflow The Future Of Ai Driven Development","en-us/blog/meet-gitlab-duo-workflow-the-future-of-ai-driven-development.yml","en-us/blog/meet-gitlab-duo-workflow-the-future-of-ai-driven-development",{"_path":3068,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3069,"content":3075,"config":3081,"_id":3083,"_type":14,"title":3084,"_source":16,"_file":3085,"_stem":3086,"_extension":19},"/en-us/blog/meet-regulatory-standards-with-gitlab",{"title":3070,"description":3071,"ogTitle":3070,"ogDescription":3071,"noIndex":6,"ogImage":3072,"ogUrl":3073,"ogSiteName":685,"ogType":686,"canonicalUrls":3073,"schema":3074},"Meet regulatory standards with GitLab security and compliance","Compliance is more than one-off audits; it's a continuous process of managing risk by implementing guardrails and monitoring specific metrics. 
Learn how with this comprehensive guide.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098739/Blog/Hero%20Images/Blog/Hero%20Images/AdobeStock_282096522_securitycompliance.jpeg_1750098739024.jpg","https://about.gitlab.com/blog/meet-regulatory-standards-with-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Meet regulatory standards with GitLab security and compliance\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Abubakar Siddiq Ango\"}],\n        \"datePublished\": \"2023-08-17\",\n      }",{"title":3070,"description":3071,"authors":3076,"heroImage":3072,"date":3077,"body":3078,"category":693,"tags":3079,"updatedDate":3080},[1538],"2023-08-17","Guiding principles in the form of standards have consistently ensured the secure and reliable delivery of products and services to customers. \nThese standards, typically enforced by legally mandated organizations, regulate industries and prevent the spread of subpar products.\n\nIn the Information Technology (IT) sector, adhering to standards extends beyond the final product delivery; it encompasses the entire solution lifecycle. As every industry increasingly leverages various forms of technology to accelerate processes and boost efficiency, vast quantities of often sensitive data are generated, stored, and transmitted using IT tools and services. 
The improper handling of this data can cause severe consequences, potentially leading to financial losses in the [hundreds of millions of dollars](https://tech.co/news/data-breaches-updated-list).\n\nThis comprehensive guide explains global compliance standards and walks through how to meet regulatory standards with GitLab compliance and security policy management.\n\nArticle contents:\n\n- [Common IT compliance standards](#common-it-compliance-standards)\n- [Global and regional compliance standards](#global-and-regional-compliance-standards)\n    - [Country/regional regulations](#countryregional-regulations)\n    - [Industry-specific standards](#industry-specific-standards)\n- [Importance of continuous compliance](#importance-of-continuous-compliance)\n- [Regulatory compliance vs. self-imposed standards](#regulatory-compliance-vs.-self-imposed-standards)\n- [Compliance management](#compliance-management)\n- [Compliance management with GitLab](#compliance-management-with-gitlab)\n    - [Compliance frameworks and pipelines](#compliance-frameworks-and-pipelines)\n    - [Security policy management](#security-policy-management)\n        - [Scan execution policies](#scan-execution-policies)\n        - [Scan result policies](#scan-result-policies)\n        - [License approval policies](#license-approval-policies)\n    - [Audit management](#audit-management)\n        - [Preparing for audits](#preparing-for-audits)\n        - [Using GitLab audit logs effectively](#using-gitlab-audit-logs-effectively)\n        - [Audit events streaming](#audit-events-streaming)\n- [Best practices for compliance management](#best-practices-for-compliance-management)\n- [Learn more](#learn-more)\n\n## Common IT compliance standards\nRegulatory compliance standards take various forms and depend on the industry or region in which an organization operates. 
First, we will look at common compliance standards, followed by region and industry-specific standards.\n\n### HIPAA\n\nThe [Health Insurance Portability and Accountability Act (HIPAA)](https://www.hhs.gov/hipaa/index.html) is important legislation that has impacted the healthcare industry in the U.S. The main aim of HIPAA, passed in 1996, is to protect sensitive patient health information from being disclosed without the patient's consent or knowledge.\n\nIt is essential to safeguard patient privacy, ensure data security, and standardize electronic healthcare transactions. HIPAA has forced healthcare providers, insurers, and related entities to implement strict data protection measures, significantly reducing unauthorized access to medical records and enhancing patient trust.\n\n### GDPR\n\n[The General Data Protection Regulation (GDPR)](https://gdpr-info.eu/) is a significant European Union law that governs the protection of personal data. Implemented in 2018, GDPR establishes strict guidelines for organizations handling the personal information of EU residents. It grants individuals greater control over their data, including the right to access, rectify, and erase personal information held by companies. GDPR mandates that organizations obtain explicit consent before collecting or processing personal data and clearly explain the purpose of data collection. Non-compliance can result in substantial financial penalties.\n\nAlthough an EU regulation, GDPR has global implications, affecting any organization that processes EU residents' data. This legislation has prompted widespread changes in data handling practices and has heightened awareness of privacy issues worldwide.\n\n### NIST SSDF\n\nThe [NIST Secure Software Development Framework (SSDF)](https://csrc.nist.gov/Projects/ssdf) is a guide to help organizations make safer software. 
Created by the National Institute of Standards and Technology, it offers [basic practices for secure software development](https://about.gitlab.com/blog/comply-with-nist-secure-supply-chain-framework-with-gitlab/).\n\nThe SSDF focuses on four main areas: getting the organization ready, protecting the software, making well-secured software, and dealing with vulnerabilities. It helps companies think about security, including security protocols, during development and throughout the software supply chain.\n\nBy following these guidelines, organizations can create software with fewer weak points and handle problems more effectively. The SSDF is flexible and can work with different software development methods, making it useful for many organizations.\n\n### PCI DSS\n\nThe [Payment Card Industry Data Security Standard (PCI DSS)](https://www.pcisecuritystandards.org/) is a set of security rules for organizations that handle credit card information. Created by major credit card companies, it aims to protect cardholders' data and prevent fraud. PCI DSS requires businesses to build and maintain a secure network, protect cardholder data, use strong access control measures, regularly monitor and test networks, and maintain an information security policy. These rules apply to any company that accepts, processes, stores, or transmits credit card data.\n\nCompliance with PCI DSS is mandatory for these businesses, regardless of their size or transaction volume. By following these standards, companies can better safeguard sensitive financial information, reduce the risk of data breaches, and maintain customer trust. Regular audits ensure ongoing compliance with these important security measures.\n\n### ISO 27000\n\n[ISO/IEC 27000](https://www.iso.org/standard/iso-iec-27000-family) provides the foundational framework for the ISO/IEC 27000 family of standards, offering a comprehensive overview of information security management systems (ISMS). 
It establishes a standardized vocabulary by defining key terms and concepts, ensuring consistent understanding across organizations in the field of information security.\n\nThe standard outlines the core components and processes to establish and maintain an effective ISMS. This guidance enables organizations to systematically manage information security risks, protecting confidential data and intellectual property.\n\nAdherence to ISO/IEC 27000 allows organizations to build a robust ISMS, enhancing their resilience against cyber threats, safeguarding valuable information assets, and fostering stakeholder trust.\n\n> [Learn how GitLab can help you on your ISO 27001 compliance journey.](https://about.gitlab.com/blog/how-gitlab-can-support-your-iso-compliance-journey/)\n\n## Global and regional compliance standards\n\n### Country/regional regulations\n\nWhile compliance standards like HIPAA and GDPR are known globally, they are USA and EU standards respectively. They influence other regional standards around the globe but are only required for companies to adhere to where they handle data from, for example, the EU. Several countries have compliance standards that must be met if a company operates in such countries. Here are a few other country-specific standards:\n- [SOX](https://en.wikipedia.org/wiki/Sarbanes%E2%80%93Oxley_Act) (USA, Public companies): Sarbanes-Oxley Act. Mandates proper financial record-keeping and reporting for public companies.\n- [PIPEDA](https://www.priv.gc.ca/en/privacy-topics/privacy-laws-in-canada/the-personal-information-protection-and-electronic-documents-act-pipeda/) (Canada, Commercial businesses): Personal Information Protection and Electronic Documents Act. Governs how private sector organizations collect, use, and disclose personal information.\n- [PDPA](https://www.pdpc.gov.sg/overview-of-pdpa/the-legislation/personal-data-protection-act) (Singapore, All organizations): Personal Data Protection Act. 
Governs the collection, use, and disclosure of personal data by organizations.\n- [APPI](https://www.ppc.go.jp/files/pdf/Act_on_the_Protection_of_Personal_Information.pdf) (Japan, All industries): Act on the Protection of Personal Information. Regulates the use of personal information in Japan.\n- [LGPD](https://lgpd-brazil.info/) (Brazil, All industries): Lei Geral de Proteção de Dados. Brazil's data protection law is similar to GDPR.\n- [FISMA](https://www.cisa.gov/topics/cyber-threats-and-advisories/federal-information-security-modernization-act) (USA, Federal agencies): Federal Information Security Management Act. Defines a framework for managing information security for federal information systems.\n- [POPI Act](https://popia.co.za/) (South Africa, All sectors): The Protection of Personal Information Act promotes the protection of personal information processed by public and private bodies.\n- [PDPA](https://www.pwc.com/th/en/tax/personal-data-protection-act.html) (Thailand, All sectors): Personal Data Protection Act. Like GDPR, it regulates the collection, use, and disclosure of personal data in Thailand.\n- [PIPL](https://en.wikipedia.org/wiki/Personal_Information_Protection_Law_of_the_People%27s_Republic_of_China) (China, All sectors): Personal Information Protection Law. China's first comprehensive data protection law is similar to GDPR.\n- [NDPR](https://nitda.gov.ng/wp-content/uploads/2021/01/NDPR-Implementation-Framework.pdf) (Nigeria, All sectors): Nigeria Data Protection Regulation. Safeguards the rights of natural persons to data privacy. 
\n- [DIFC Data Protection Law](https://www.difc.ae/business/laws-and-regulations/legal-database/difc-laws/data-protection-law-difc-law-no-5-2020) (Dubai, Companies in Dubai International Financial Centre): Regulates the processing of personal data in the DIFC free zone.\n- [PDPA](https://www.pdp.gov.my/jpdpv2/laws-of-malaysia-pdpa/personal-data-protection-act-2010/?lang=en) (Malaysia, Commercial transactions): Personal Data Protection Act. Regulates the processing of personal data in commercial transactions.\n- [Privacy Act](https://www.ag.gov.au/rights-and-protections/privacy) (Australia, Government agencies, and some private sector organizations) regulates how personal information is handled by Australian government agencies and some private sector organizations.\n- [KVKK](https://www.kvkk.gov.tr/Icerik/6649/Personal-Data-Protection-Law) (Turkey, All sectors): Turkish Personal Data Protection Law. Regulates the processing of personal data and protects individual rights.\n\nThese standards reflect the growing global concern for data privacy and security. Many countries are developing their own frameworks inspired by established regulations like GDPR. Each standard is tailored to the specific legal, cultural, and economic context of its country.\n\n### Industry-specific standards\n\n- [PCI DSS](https://www.pcisecuritystandards.org/) (Financial Services): The Payment Card Industry Data Security Standard applies to all organizations that handle credit card information globally.\n- [ISO 27001](https://www.iso.org/standard/iso-iec-27000-family) (All industries) is an Information Security Management System standard that provides a framework for information security management practices.\n- [GAMP 5](https://qbdgroup.com/en/blog/gamp-categories/) (Pharmaceutical): Good Automated Manufacturing Practice. 
Guidelines for computerized systems in pharmaceutical manufacturing.\n- [ISO 13485](https://www.iso.org/standard/59752.html) (Medical Devices): Specifies requirements for a quality management system for medical device manufacturers.\n- [COBIT](https://www.isaca.org/resources/cobit) (IT Management): Control Objectives for Information and Related Technologies. Framework for IT management and IT governance.\n- [ITIL](https://en.wikipedia.org/wiki/ITIL) (IT Services) is an Information Technology Infrastructure Library, a set of detailed practices for IT service management.\n- [NIST CSF](https://www.nist.gov/cyberframework) (Cybersecurity): National Institute of Standards and Technology Cybersecurity Framework. Guidance on managing and reducing cybersecurity risk.\n- [WCAG](https://www.w3.org/WAI/standards-guidelines/wcag/) (Web Accessibility): The Web Content Accessibility Guidelines aim to make web content more accessible to people with disabilities.\n- [Basel III](https://www.bis.org/bcbs/basel3.htm) (Banking) is the international regulatory framework for banks, including IT risk management requirements.\n- [TISAX](https://portal.enx.com/en-US/TISAX/) (Automotive): Trusted Information Security Assessment Exchange. Information security assessment and exchange mechanism for the automotive industry. (Learn how [GitLab's TISAX certification](https://about.gitlab.com/blog/gitlab-drives-automotive-industry-information-security-with-tisax/) helps customers in the automotive industry.)\n\nThese standards apply across national boundaries to specific industries or aspects of IT, ensuring consistent practices and security measures globally within their respective domains.\n\n## Importance of continuous compliance\nOrganizations need to implement systems that ensure compliance with relevant regulatory requirements and can achieve this with continuous compliance. 
[Continuous software compliance](https://about.gitlab.com/solutions/compliance/) is essential to every industry, as it provides ongoing monitoring, assessment, and adjustment of an organization's systems, processes, and practices to ensure they consistently meet relevant standards and regulations.\n\nContinuous compliance is not just a regulatory checkbox but a strategic necessity for software development today. It empowers organizations to proactively navigate emerging threats, technological shifts, and regulatory changes while fostering stakeholder trust, operational efficiency, and competitive advantage in an increasingly complex business environment.\n\n## Regulatory compliance vs. self-imposed standards\n\nRegulatory compliance and self-imposed standards are two distinct approaches to organizational governance. Regulatory compliance involves adhering to mandatory laws and regulations set by external authorities, which have a broad scope and potential legal consequences for non-compliance. It focuses on meeting minimum legal requirements and is generally less flexible. Examples include GDPR, HIPAA, and SOX.\n\nIn contrast, self-imposed standards are voluntary guidelines adopted by organizations to improve quality, security, or performance. These can be tailored to specific needs, are highly adaptable, and typically aim to exceed minimum requirements. While failure to meet self-imposed standards may impact reputation, it usually doesn't have legal ramifications.\nThe key differences lie in their origin, motivation, adaptability, and scope. 
Many organizations implement both approaches to create a comprehensive quality, security, and performance management strategy.\n\n## Compliance management\n\nTo meet standards, organizations must evaluate the right [compliance metrics](https://advisory.kpmg.us/articles/2018/compliance-metrics.html) and integrate them into their standard operating procedures to provide insights that enable early detection and prevention of current and future compliance risks. Thus, there is a need for efficient [compliance management](https://about.gitlab.com/solutions/compliance/). Compliance management goes beyond checking off a checklist periodically; it's a comprehensive organization-wide continuous process.\n\n## Compliance management with GitLab\n\nGitLab provides features that allow organizations to create [compliance frameworks](https://docs.gitlab.com/ee/user/group/compliance_frameworks.html), [security policies](https://docs.gitlab.com/ee/user/application_security/policies/), and [audit management](https://docs.gitlab.com/ee/administration/audit_reports.html). GitLab also enables compliance or leadership teams to monitor compliance metrics with [compliance reports](https://docs.gitlab.com/ee/user/compliance/compliance_report/index.html). Let's take a look at some of these features.\n\n### Compliance frameworks and pipelines\n\nOrganizations can create a [compliance framework](https://docs.gitlab.com/ee/user/group/compliance_frameworks.html) that identifies projects in GitLab that must meet defined compliance requirements, which can be enforced with [compliance pipelines](https://docs.gitlab.com/ee/user/group/compliance_pipelines.html). 
For example, a FinTech company can create a [default compliance framework](https://docs.gitlab.com/ee/user/group/compliance_frameworks.html#default-compliance-frameworks) for all projects, ensuring every stage of their software development lifecycle meets the PCI DSS requirements for handling cardholder data.\n\nThese requirements are then enforced by ensuring every change introduced to the codebase is sufficiently tested automatically with GitLab's [application security features](https://docs.gitlab.com/ee/user/application_security/), which cover source code, dependencies, licenses, vulnerabilities in running application and infrastructure configurations. You can learn more about how GitLab helps you achieve PCI compliance and other [regulatory compliance](https://about.gitlab.com/solutions/continuous-software-compliance/) with compliance frameworks.\n\nThe following videos demonstrate setting up and using compliance frameworks and pipelines.\n\n**Video tutorial: Create compliance frameworks**\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/IDswzRI-8VQ\" title=\"How to setup Compliance Frameworks &amp; Pipelines in GitLab\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n**Video tutorial: Enforce compliance pipelines**\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/jKA_e_jimoI\" title=\"GitLab Compliance Pipelines &amp; Overriding Settings\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n### Security policy management\n\nSecurity and compliance teams can use GitLab to enforce compliance requirements by ensuring security scanners run in certain pipelines or require approval on merge requests when [security policies](https://docs.gitlab.com/ee/user/application_security/policies/) are violated. 
GitLab supports [scan execution](https://docs.gitlab.com/ee/user/application_security/policies/scan-execution-policies.html) and [scan result](https://docs.gitlab.com/ee/user/application_security/policies/scan-result-policies.html) policies. These policies are defined in a dedicated [security policy project](https://docs.gitlab.com/ee/user/application_security/policies/#security-policy-project) that separates duties between security and development teams. Security policies can be applied granularly at the group, sub-group, and project levels. The policies can be edited in rule mode, which uses the [policy editor](https://docs.gitlab.com/ee/user/application_security/policies/#policy-editor), or in YAML mode.\n\n#### Scan execution policies\nScan execution policies can be configured to run on a specified [GitLab Runner](https://docs.gitlab.com/runner/), including the following:\n- [Static Application Security Testing](https://docs.gitlab.com/ee/user/application_security/sast/)\n- [Infrastructure as Code](https://docs.gitlab.com/ee/user/application_security/iac_scanning/)\n- [Dynamic Application Security Testing](https://docs.gitlab.com/ee/user/application_security/dast/)\n- [Secret Detection](https://docs.gitlab.com/ee/user/application_security/secret_detection/)\n- [Container Scanning](https://docs.gitlab.com/ee/user/application_security/container_scanning/)\n- [Dependency Scanning](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/) \n\nThe scan jobs can be run on schedule or anytime a pipeline runs. Compliance and security teams can use scan execution policies to periodically check on and proactively prevent vulnerability escalation as part of a vulnerability management strategy. 
They can also reinforce controls when new trends are observed from scan results.\n\n**Video tutorial: Set up security scan policies in GitLab**\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/ZBcqGmEwORA\" title=\"How to set up security scan policies in GitLab\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n#### Scan result policies\n\nScan result policies add required review and approval for merge requests when the results of specified security scans violate the policies' rules. For example, a policy can require a security team member to take action when a newly identified critical SAST vulnerability is detected. This way, security and compliance team members can support developers while ensuring the changes introduced to the codebase are secure and meet compliance requirements.\n\n**Video tutorial: Overview of GitLab Scan Result Policies**\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/w5I9gcUgr9U\" title=\"Overview of GitLab Scan Result Policies\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n#### License approval policies\nWhen selecting scan types for scan result policy rules, you can choose between security scan, the default behavior for scan result policies, and license scan, which helps ensure license compliance. License scanning depends on the output of the [dependency scanning](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/) [CI/CD](https://about.gitlab.com/topics/ci-cd/) job to check if identified licenses match specified criteria, then adds approval requirements before an open merge request can be merged. 
This is crucial to ensure that only dependencies with approved licenses are used in your organization.\n\n**Video demo: License approval policies**\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/34qBQ9t8qO8\" title=\"License Approval Policies Demo\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n### Audit management\n#### Preparing for audits \nAudits are essential for compliance management because they allow you to understand your organization's security and compliance posture. External audits required by regulators are often detailed and exhaustive. To prepare for them, organizations need to:\n- Determine which regulations or standards will be assessed and what areas of the organization will be examined.\n- Analyze past audit results and ensure any previously identified issues have been addressed.\n- Collect all relevant policies, procedures, and records that demonstrate compliance.\n- Perform an internal audit before the official audit to identify and address any potential compliance gaps.\n- Brief employees on the audit process and their roles. 
Conduct training if necessary.\n- Ensure all required documentation is easily accessible and well-organized.\n- Ensure all compliance-related policies and procedures are up-to-date and aligned with current regulations.\n- Verify that all technical safeguards and controls are in place and functioning correctly.\n- Identify key personnel who may be interviewed and brief them on potential questions.\n- Address known issues: If any known compliance issues exist, develop and document plans to address them.\n\nTo enable your preparedness, GitLab features: [Audit Events](https://docs.gitlab.com/ee/administration/audit_events.html) and [Compliance Center](https://docs.gitlab.com/ee/user/compliance/compliance_center/index.html) give a detailed view of an organization's compliance.\n\n#### Using GitLab audit logs effectively \n\nYou want to know every action taken on the GitLab instance with [audit events](https://docs.gitlab.com/ee/administration/audit_events.html). Audit reports allow you to track every significant event, who performed it, and when. You can also generate detailed reports from audit events using [audit reports](https://docs.gitlab.com/ee/administration/audit_reports.html), allowing you to prove your compliance to auditors or regulators.\n\n![Audit events](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098755/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098755493.png)\n\n[The compliance center](https://docs.gitlab.com/ee/user/compliance/compliance_report/index.html#compliance-violations-report) is a significant component of audit management in GitLab, giving visibility to your organization's compliance posture. 
Compliance reports detail every violation discovered with the [compliance violations report](https://docs.gitlab.com/ee/user/compliance/compliance_center/compliance_violations_report.html) and the frameworks used by projects within your organization with the compliance [frameworks report](https://docs.gitlab.com/ee/user/compliance/compliance_center/compliance_frameworks_report.html).\n\n![Meet regulatory requirements - image 2](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098756/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750098755493.png)\n\n\u003Ccenter>\u003Ci>Example of a compliance violations report from a parent GitLab group.\u003C/i>\u003C/center>\n\n![Meet regulatory requirements - image 3](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098755/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750098755495.png)\n\n\u003Ccenter>\u003Ci>Example of a compliance framework report for all projects in a group\u003C/i>\u003C/center>\n\n#### Audit events streaming\nMost organizations have existing systems to monitor activities in their systems in real-time. With [audit events streaming](https://docs.gitlab.com/ee/administration/audit_event_streaming/index.html) on GitLab, you can integrate third-party solutions like Splunk infrastructure monitoring or DataDog streams monitoring service for real-time audit events analytics. All audit events data are sent to the streaming destination (it's essential to stream to a trusted service). 
Audit events streaming can be [configured at top-level groups](https://docs.gitlab.com/ee/administration/audit_event_streaming/index.html#top-level-group-streaming-destinations) and at the [instance level](https://docs.gitlab.com/ee/administration/audit_event_streaming/#instance-streaming-destinations) for self-managed GitLab instances.\n\n## Best practices for compliance management \n\nHere are some best practices for effective compliance management:\n- Establish a strong compliance culture that promotes organizational compliance awareness and ensures leadership commitment and support.\n- Develop a comprehensive compliance program with clear policies and procedures and regularly review the program to reflect regulation changes.\n- Implement risk assessment and management to regularly identify and assess compliance risks, prioritizing risks based on potential impact and likelihood.\n- Conduct regular compliance training tailored to specific roles and responsibilities for all employees.\n- Implement compliance management to automate compliance monitoring and compliance reporting where possible.\n- Perform internal audits to identify gaps and areas for improvement. It is also essential to consider external audits unbiasedly and use audit results to refine and improve compliance processes.\n- Stay informed about regulatory changes by assigning responsibility for monitoring regulatory updates and participating in industry associations and forums.\n- Integrate compliance into business processes, embed compliance checks into operational workflows, and consider compliance implications in strategic decision-making. Align compliance goals with business objectives\n- Develop response plans for potential compliance breaches and conduct mock scenarios to test readiness for incidents and violations.\n\n## Learn more\nCompliance is a continuous process of efficiently managing risk by implementing guardrails and monitoring compliance metrics. 
GitLab empowers organizations to fulfill regulatory standards with our [compliance management](https://about.gitlab.com/solutions/compliance/) features. With GitLab, you can improve the software supply chain experience, build more secure software faster, and maintain the trust of your users, clients, and community.\n\n> Learn more about compliance and security policy management with the [GitLab DevSecOps tutorial](https://gitlab-da.gitlab.io/tutorials/security-and-governance/devsecops/simply-vulnerable-notes/), which contains lessons covering the complete application security lifecycle in GitLab.\n\n## Read more\n- [Introducing GitLab Dedicated for Government](https://about.gitlab.com/blog/introducing-gitlab-dedicated-for-government/)\n- [How to ensure separation of duties and enforce compliance with GitLab](https://about.gitlab.com/blog/ensuring-compliance/)\n- [The ultimate guide to least privilege access with GitLab](https://about.gitlab.com/blog/the-ultimate-guide-to-least-privilege-access-with-gitlab/)",[951,9,693,186],"2024-08-22",{"slug":3082,"featured":6,"template":698},"meet-regulatory-standards-with-gitlab","content:en-us:blog:meet-regulatory-standards-with-gitlab.yml","Meet Regulatory Standards With Gitlab","en-us/blog/meet-regulatory-standards-with-gitlab.yml","en-us/blog/meet-regulatory-standards-with-gitlab",{"_path":3088,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3089,"content":3095,"config":3100,"_id":3102,"_type":14,"title":3103,"_source":16,"_file":3104,"_stem":3105,"_extension":19},"/en-us/blog/migrating-from-bamboo-to-gitlab-cicd",{"title":3090,"description":3091,"ogTitle":3090,"ogDescription":3091,"noIndex":6,"ogImage":3092,"ogUrl":3093,"ogSiteName":685,"ogType":686,"canonicalUrls":3093,"schema":3094},"How to migrate from Bamboo to GitLab CI/CD","With the fast approaching EOL of Atlassian Server products, including Bamboo, this blog post explains how users of Bamboo can migrate to GitLab 
CI/CD.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749658924/Blog/Hero%20Images/securitylifecycle-light.png","https://about.gitlab.com/blog/migrating-from-bamboo-to-gitlab-cicd","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"How to migrate from Bamboo to GitLab CI/CD\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Abubakar Siddiq Ango\"}],\n        \"datePublished\": \"2023-10-26\",\n      }",{"title":3090,"description":3091,"authors":3096,"heroImage":3092,"date":3097,"body":3098,"category":1277,"tags":3099},[1538],"2023-10-26","_Atlassian is ending support for all Server products in February 2024.\n[Learn more about the benefits of migrating from Atlassian to GitLab\nhere](https://about.gitlab.com/move-to-gitlab-from-atlassian/)._\n\n\nThe clock is ticking towards the February 15, 2024, [end-of-life date\nAtlassian has set for their Server\nproducts](https://about.gitlab.com/blog/atlassian-server-ending-move-to-a-single-devsecops-platform/).\nUsers whose workflows rely on on-premises deployments of Atlassian Server\nproducts are faced with the choice of upgrading to Atlassian’s Data Center\nor Cloud products or finding alternatives. \n\n\nOne of the Atlassian products that is impacted is Bamboo, a CI/CD solution.\nWhether you're simply looking for a new CI/CD tool or you're looking to\nconsolidate your entire toolchain, the Atlassian Server end of life is a\ngreat opportunity to make the move to GitLab to take advantage of the\nautomation, scalability, and security of an end-to-end DevSecOps platform.\n\n\nIn this blog post, we'll cover the steps you can take to migrate your Bamboo\nCI/CD setup to GitLab CI/CD.\n\n\n## How is GitLab CI/CD different from Bamboo?\n\n\n### Organization\n\n\nBamboo is structured around Projects and Plans. 
CI/CD jobs are grouped into\nstages, which are defined in a Bamboo plan along with other configurations\nthat determine how jobs run. Bamboo projects are used to organize plans,\nwhich are classified into Build and Deployment plans. \n\n\nAs the name implies, Build plans can be configured to pull code from\nconfigured repositories and generate artifacts. These artifacts are picked\nby jobs defined in Deployment plans and are deployed to environments\nconfigured in Bamboo. Bamboo jobs are also composed of tasks, which can be a\nscript, a task to pull code from a repository, or a task specific to a\ntechnology.\n\n\nYou also need to add code repositories to a Bamboo plan or a project, making\nit available to all plans under it, and set triggers for how Bamboo detects\nchanges and runs builds.\n\n\nGitLab is organized differently. Everything is in a single platform, with\nyour CI/CD configuration provided as part of your code in a `.gitlab-ci.yml`\nfile, from a group’s compliance pipeline configuration, or Auto DevOps when\nenabled and the `.gitlab-ci.yml` file is not found in a project.\n\n\nGitLab CI/CD configurations are composed of jobs, grouped into stages. How\nthe jobs are triggered can be controlled by CI/CD `rules` and there is no\nseparate configuration for deployments. Deployment jobs can be defined in\nthe same CI/CD script in a `deploy` stage, with the [deployment\nenvironment](https://docs.gitlab.com/ee/ci/environments/) set.\n\n\n### Agents vs Runners\n\n\nBamboo uses\n[Agents](https://confluence.atlassian.com/confeval/development-tools-evaluator-resources/bamboo/bamboo-remote-agents-and-local-agents)\n\nto run builds and deployments. These can be local agents running on the\nBamboo server or\n\nremote agents running external to the server. GitLab uses a similar concept\nto agents,\n\ncalled [GitLab Runner](https://docs.gitlab.com/runner/), which uses\n[executors](https://docs.gitlab.com/runner/executors/)\n\nto run builds. 
Examples of executors include SSH, Docker, and Kubernetes.\nYou can choose to\n\nuse GitLab [SaaS runners](https://docs.gitlab.com/ee/ci/runners/) or deploy\nyour own [self-managed\nrunners](https://docs.gitlab.com/runner/install/index.html).\n\n\n### Bamboo Specs vs .gitlab-ci.yml file\n\n\nBamboo is largely configured via the Bamboo UI but can also be configured as\ncode using Bamboo Specs. Bamboo Specs can be defined using Java and other\nJVM languages or using YAML, with Java having more complete feature coverage\nthan YAML. Bamboo Specs can be defined and stored in spec repositories, then\nlinked to Bamboo projects.\n\n\nThe `.gitlab-ci.yml` file is central to the CI/CD workflow in GitLab. When\nincluded in a project, the defined configurations are executed against the\nproject; otherwise, [Auto\nDevOps](https://docs.gitlab.com/ee/topics/autodevops/) automatically builds\nand deploys your application, when enabled. Templates and CI/CD components\ncan also be added to `.gitlab-ci.yml` for complex use cases.\n\n\n## How GitLab steps up your workflow\n\n\nIn addition to building and deploying your application, GitLab provides a\nsuite of features that allows for building secure applications fast and\nefficiently. These include:\n\n\n- [Application\nsecurity](https://docs.gitlab.com/ee/user/application_security/): GitLab\nanalyzes your application across the stages of the software development\nlifecycle with security scans such as Static Application Security Testing\n(SAST), Secret Detection, Infrastructure as Code (IaC) Scanning, Dependency\nScanning, License Scanning, Coverage-guided Fuzz Testing, Container\nScanning, API Security, Dynamic Application Security Testing (DAST), and\nOperational Security Scanning.\n\n- Compliance and security policies: Understanding the results of security\nscans and putting policies in place is crucial to ensuring secure\napplications. 
You can set up Scan Execution or Result policies to ensure\nadditional scans or approval requirements are added to comply with\nregulatory or self-imposed requirements.\n\n- [CI/CD catalog](https://docs.gitlab.com/ee/ci/components/catalog.html):\nParts of CI/CD configurations that are used across multiple projects can be\nturned into\n[components](https://docs.gitlab.com/ee/ci/components/index.html#components-repository)\nstored in component repositories that are discoverable in the CI/CD catalog.\n\n- Packages and registries: Custom or local replicas of popular packages can\nbe hosted with the [GitLab Package\nRegistry](https://docs.gitlab.com/ee/user/packages/package_registry/index.html).\nYou can also host container images with the [GitLab Container\nRegistry](https://docs.gitlab.com/ee/user/packages/container_registry/index.html)\nand Terraform modules with the [GitLab Terraform Module\nRegistry](https://docs.gitlab.com/ee/user/packages/terraform_module_registry/index.html).\nIf you frequently use public images or packages, you can use the [Dependency\nProxy](https://docs.gitlab.com/ee/user/packages/dependency_proxy/index.html)\nto maintain a local cache.\n\n\n[Learn more about other ways to automate your entire workflow with GitLab\nCI/CD](https://about.gitlab.com/solutions/continuous-integration/).\n\n\n## Convert Bamboo Specs to .gitlab-ci.yml script\n\n\nFor the purpose of this blog post, we will focus on [Bamboo YAML\nSpecs](https://docs.atlassian.com/bamboo-specs-docs/9.3.0/specs.html?yaml).\nYou can export your Bamboo Plans as YAML Spec — [learn more\nhere](https://confluence.atlassian.com/bamboo/exporting-existing-plan-configuration-to-bamboo-yaml-specs-1018270696.html).\nNow, lets walk through converting your Bamboo YAML Specs into GitLab CI/CD\nconfiguration.\n\n\n### Container image\n\n\nFirst is defining the container image of the container your jobs will run\nin. By default, Bamboo uses Agents, which depend on how the host machines\nare configured. 
You can replicate the Agent’s environment into a container\nimage hosted in the [GitLab Container\nRegistry](https://docs.gitlab.com/ee/user/packages/container_registry/).  \n\n\nIf you already run Bamboo jobs in a container image, it will look like this\nin your spec:\n\n\n```yaml\n\n---\n\nversion: 2\n\n# ...\n\ndocker: ubuntu\n\n```\n\n\nThis might be defined at the plan or job level. You can define it in GitLab\nas follows:\n\n\n```yaml\n\nimage: ubuntu\n\n```\n\n\n[Learn more about running CI/CD jobs in containers\nhere](https://docs.gitlab.com/ee/ci/docker/using_docker_images.html). If\nyour use case does not include containers, you can explore [other\nexecutors](https://docs.gitlab.com/runner/executors/).\n\n\n### Stages\n\n\nIn Bamboo, stages and their list of jobs are defined first, before the job\ndefinitions:\n\n\n```yaml\n\nversion: 2\n\nstages:\n  - First Stage:\n      jobs:\n        - Job 1A \n        - Job 1B\n  - Second Stage:\n      jobs:\n        - Job 2A \n        - Job 2B\n\nJob 1A:\n  tasks:\n    - clean\n    - script\n        - touch file1A.txt\n\nJob 1B:\n  tasks:\n    - clean\n    - script\n        - touch file1B.txt\n\nJob 2A:\n  tasks:\n    - clean\n    - script\n        - touch file2A.txt\n\nJob 2B:\n  tasks:\n    - clean\n    - script\n        - touch file2B.txt\n```\n\n\nIn GitLab, you list your stages in the order in which you want their jobs to\nrun:\n\n\n```yaml\n\nstages:\n  - build\n  - test\n  - deploy\n\njob1:\n  stage: build\n  script:\n    - echo \"This job compiles code.\"\n\njob2:\n  stage: test\n  script:\n    - echo \"This job tests the compiled code. It runs when the build stage completes.\"\n\njob3:\n  script:\n    - echo \"This job also runs in the test stage\".\n\njob4:\n  stage: deploy\n  script:\n    - echo \"This job deploys the code. It runs when the test stage completes.\"\n  environment: production\n```\n\n\nAll the jobs in a stage run in parallel and when they succeed, execution\nproceeds to the next stage. 
This only changes in complex pipelines where a\njob depends on another using\n[`needs`](https://docs.gitlab.com/ee/ci/yaml/index.html#needs).\n\n\n### Variables\n\n\nBamboo has System, Global, Project, Plan, and Build-specific variables,\nwhich can be accessed using the format `${system.variableName}` for system\nvariables and `${bamboo.variableName}` for others. Periods (.) are replaced\nby underscores (_) when variables are accessed in scripts.\n\n\n```yaml\n\nversion: 2\n\n# ...\n\nvariables:\n  username: admin\n  releaseType: milestone\n\nDefault job:\n  tasks:\n    - script: echo 'Release Type is $bamboo_releaseType'\n```\n\n\nIn GitLab, variables can be defined at group, project, CI Script, and job\nlevels. In GitLab self-managed and GitLab Dedicated, administrators can\ndefine variables at the instance level. GitLab allows protecting, masking,\nand expanding variables. Protected variables are only accessible for\npipelines running against the default or protected branches. [Learn more\nabout CI/CD variables](https://docs.gitlab.com/ee/ci/variables/index.html)\nand [where you can use\nthem](https://docs.gitlab.com/ee/ci/variables/where_variables_can_be_used.html).\n\n\nHere is an example:\n\n\n```yaml\n\nvariables:\n  GLOBAL_VAR: \"A global variable\"\n\njob1:\n  variables:\n    JOB_VAR: \"A job variable\"\n  script:\n    - echo \"Variables are '$GLOBAL_VAR' and '$JOB_VAR'\"\n\njob2:\n  script:\n    - echo \"Variables are '$GLOBAL_VAR' and '$JOB_VAR'\"\n```\n\n\n### Build Jobs\n\n\nBamboo Build Jobs are composed of tasks, each of which is a small unit of\nwork that can be anything from checking out source code to injecting\nvariables or running a script.\n\n\n```yaml\n\nversion: 2\n\nstages:\n  - Run Tests:\n      jobs:\n        - Test Ruby \n\nTest Ruby :\n  key: TEST\n  tasks:\n  - checkout:\n      force-clean-build: false\n      description: Checkout Default Repository\n  - script:\n      interpreter: SHELL\n      scripts:\n      - |-\n        ruby -v  # 
Print out ruby version for debugging\n        bundle config set --local deployment true  \n        bundle install -j $(nproc)\n        rubocop\n        rspec spec\n      description: run bundler & rspec\n\n```\n\n\nIn this example, the plan has two tasks, checkout and script. The checkout\ntasks pull an updated version of the code repository, which is made\navailable for the script task to execute its commands against. \n\n\n[Jobs in GitLab](https://docs.gitlab.com/ee/ci/jobs/) are composed of script\ncommands:\n\n\n```\n\nimage: ruby:latest\n\n\nstages:\n  - test\n\nrspec:\n  stage: test\n  script:\n    - ruby -v\n    - bundle config set --local deployment true \n    - bundle install -j $(nproc)\n    - rubocop\n    - rspec spec\n```\n\n\nIn the example above, the stage the job belongs to is specified with the\n`stage` keyword and the commands to be executed by the GitLab runner for the\njob are listed under `script`.\n\n\nIn Bamboo, you can use\n[executables](https://confluence.atlassian.com/bamboo/defining-a-new-executable-capability-289277164.html),\nsuch as Ant, Maven, or PHPUnit, in a task to build your application. In\nGitLab, you can package the binaries you need in a custom container image\nand use it as your CI/CD image.\n\n\n### Deployment jobs\n\n\nIn Bamboo, Deployment projects organize software releases or environments\napplications are deployed to. A deployment plan can have a release\ndefinition:\n\n\n```yaml\n\n---\n\nversion: 2\n\n\ndeployment:\n  name: Release Software\n  source-plan: BUILD-APP\n\nrelease-naming: release-1.1\n\n```\n\n\nFor releases, you specify the plan it should get the generated artifacts\nfrom. 
And for deployment for environments:\n\n\n```yaml\n\n---\n\nversion: 2\n\n# ...\n\nenvironments:\n  - Test\n  - QA\n  - Prod\n\nTest:\n  tasks:\n    - clean\n    - artifact-download:\n        destination: /workdir\n```\n\n\nIn GitLab CI/CD, you can create a [deployment\njob](https://docs.gitlab.com/ee/ci/jobs/#deployment-jobs)\n\nthat deploys to an\n[environment](https://docs.gitlab.com/ee/ci/environments/index.html) or\ncreate a [release](https://docs.gitlab.com/ee/user/project/releases/).\n\nFor deploying to an environment, you use the\n[`environment`](https://docs.gitlab.com/ee/ci/yaml/#environment) keyword:\n\n\n```yaml\n\ndeploy-to-production:\n  stage: deploy\n  script:\n    - # Run Deployment script\n    - ./.ci/deploy_prod.sh\n  environment:\n    name: production\n```\n\n\nIf you are creating a release instead, you use the\n[`release`](https://docs.gitlab.com/ee/ci/yaml/#release)\n\nkeyword along with the\n[release-cli](https://gitlab.com/gitlab-org/release-cli/-/tree/master/docs)\n\ntool to create releases for [Git\ntags](https://docs.gitlab.com/ee/user/project/repository/tags/).\n\nThe `release` section is executed after the `script` section, which must\nexist.\n\nIf you don’t have any script commands to run, you can put a placeholder\ncommand;\n\nfor example, `echo` a message.\n\n\n```yaml\n\nrelease_job:\n  stage: release\n  image: registry.gitlab.com/gitlab-org/release-cli:latest\n  rules:\n    - if: $CI_COMMIT_TAG                  # Run this job when a tag is created manually\n  script:\n    - echo \"Building release version\"\n  release:\n    tag_name: $CI_COMMIT_TAG\n    name: 'Release $CI_COMMIT_TAG'\n    description: 'Release created using the release-cli.'\n```\n\n\n### Rules and workflows\n\n\nIn Bamboo, triggers can be used to control how Jobs are executed. Triggers\ncan be a periodic poll of the repository for changes or a webhook that\nnotifies Bamboo of changes to the repository. 
Trigger conditions can be\nenabled in the Bamboo web UI to make sure the build only runs if other plans\nare passing.\n\n\nExample of a trigger:\n\n\n```yaml\n\n---\n\nversion: 2\n\ntriggers:\n  - polling: 130\n  - cron: 0 * * * ? *\n```\n\n\nIn GitLab, [CI/CD pipelines](https://docs.gitlab.com/ee/ci/pipelines/) can\nbe triggered by a commit/push, a merge, manually, on schedule, or with\n[pipeline\nsubscriptions](https://docs.gitlab.com/ee/ci/pipelines/#trigger-a-pipeline-when-an-upstream-project-is-rebuilt).\nJobs in a pipeline can further be controlled using `rules` or `workflow`.\nLearn more about [Job\nControl](https://docs.gitlab.com/ee/ci/jobs/job_control.html) and [pipeline\nworkflows](https://docs.gitlab.com/ee/ci/yaml/workflow.html#) in GitLab\nCI/CD. \n\n\nHere's an example using `rules` in GitLab CI/CD:\n\n\n```yaml\n\nworkflow:\n  rules:\n    - changes:\n      - .gitlab/**/**.md\n      when: never\n```\n In this example, pipelines are never executed when `.md` files changed in the `.gitlab` folder.\n\n#### Artifacts\n\n\nYou can define Job artifacts using the `artifacts` keyword in both GitLab\nand Bamboo.\n\n\nIn Bamboo, artifacts can be defined as follows:\n\n\n```yaml\n\n---\n\nversion: 2\n\n# ...\n  artifacts:\n    -\n      name: Test Reports\n      location: target/reports\n      pattern: '*.xml'\n      required: false\n      shared: false\n    -\n      name: Special Reports\n      location: target/reports\n      pattern: 'special/*.xml'\n      shared: true\n```\n\n\nIn the Bamboo Spec above, artifacts are defined with a name, location,\npattern, and optionally\n\nthe ability to share the artifacts with other jobs or plans. 
You can go\nfurther to define jobs that\n\ncan subscribe to the artifact.\n\n\n`artifact-subscriptions` is used to access artifacts from another job in the\nsame plan:\n\n\n```yaml\n\nTest app:\n  artifact-subscriptions:\n    -\n      artifact: Test Reports\n      destination: deploy\n```\n\n\n`artifact-download` is used to access artifacts from jobs in a different\nplan.\n\n\n```yaml\n\n---\n\nversion: 2\n\n# ...\n  tasks:\n    - artifact-download: \n        source-plan: PROJECTKEY-PLANKEY\n```\n\n\nYou need to provide the key of the plan you are downloading artifacts from\nin the `source-plan` keyword.\n\n\nIn GitLab, all artifacts from completed jobs in the previous stages are\ndownloaded by default.\n\nHere is an example of an artifact definition in GitLab:\n\n\n```yaml\n\npdf:\n  script: xelatex mycv.tex\n  artifacts:\n    name: \"pdf-files\"\n    public: false\n    untracked: true\n    paths:\n      - pdfs/\n    exclude:\n      - pdfs/*.tex\n```\n\n\nIn the CI/CD script above:\n\n\n- The name of the artifact is specified laterally. You can choose to make it\ndynamic by using a CI/CD variable.\n\n- The `public` keyword is used to set whether the artifact should be\npublicly available. This is not enabled by default\n  on self-managed GitLab instances. An administrator can enable it with the [feature flag](https://docs.gitlab.com/ee/administration/feature_flags/)\n  named `non_public_artifacts`.\n- You can set the `untracked` to include or exclude Git untracked files\nalong with those specified using `paths`.\n\n\nRead more about GitLab CI/CD [job\nartifacts](https://docs.gitlab.com/ee/ci/jobs/job_artifacts.html).\n\n\n## How to plan your migration\n\n\nPlanning a migration to Gitlab CI/CD from Bamboo doesn't start with\nconverting your Bamboo plan to GitLab CI/CD scripts. It starts with aligning\nwith your leadership and stakeholders and clearly communicating the vision\nof the migration. 
[Check out our documentation to learn more about managing\norganizational\nchanges](https://docs.gitlab.com/ee/ci/migration/plan_a_migration.html).\nOnce you have the necessary buy-in, you can proceed with the following\nsteps:\n\n\n- [Import your projects](https://docs.gitlab.com/ee/user/project/import/) to\nGitLab.\n\n- Identify the necessary binaries and build tools needed to build your\napplication, along with their dependencies.\n\n- Define the flow of your pipeline, which jobs depend on each other, and the\nnecessary triggers.\n\n- Learn more about [key GitLab CI/CD\nfeatures](https://docs.gitlab.com/ee/ci/index.html).\n\n- Identify the credentials and variables needed in your pipeline and define\nthem in the variable section of your project's CI/CD settings or using a\nsecret manager.\n\n- Follow [this\ntutorial](https://docs.gitlab.com/ee/ci/quick_start/index.html) to create\nyour first GitLab pipeline; you can also explore more [complex\npipelines](https://docs.gitlab.com/ee/ci/quick_start/tutorial.html).\n\n- Iterate and test your GitLab CI/CD pipelines and review [.gitlab-ci.yml\nkeyword reference](https://docs.gitlab.com/ee/ci/yaml/index.html).\n\n\nReady to make the move? 
[We’re here to\nhelp](https://about.gitlab.com/sales/).\n",[951,9],{"slug":3101,"featured":6,"template":698},"migrating-from-bamboo-to-gitlab-cicd","content:en-us:blog:migrating-from-bamboo-to-gitlab-cicd.yml","Migrating From Bamboo To Gitlab Cicd","en-us/blog/migrating-from-bamboo-to-gitlab-cicd.yml","en-us/blog/migrating-from-bamboo-to-gitlab-cicd",{"_path":3107,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3108,"content":3114,"config":3119,"_id":3121,"_type":14,"title":3122,"_source":16,"_file":3123,"_stem":3124,"_extension":19},"/en-us/blog/migration-guide-github-advanced-security-to-gitlab-ultimate",{"title":3109,"description":3110,"ogTitle":3109,"ogDescription":3110,"noIndex":6,"ogImage":3111,"ogUrl":3112,"ogSiteName":685,"ogType":686,"canonicalUrls":3112,"schema":3113},"Migration guide: GitHub Advanced Security to GitLab Ultimate","Understand the similarities and differences between GitLab Ultimate and GitHub Advanced Security. Then follow this in-depth tutorial to make the move to the GitLab DevSecOps platform.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749666187/Blog/Hero%20Images/blog-image-template-1800x945__6_.png","https://about.gitlab.com/blog/migration-guide-github-advanced-security-to-gitlab-ultimate","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Migration guide: GitHub Advanced Security to GitLab Ultimate\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Fernando Diaz\"}],\n        \"datePublished\": \"2024-05-01\",\n      }",{"title":3109,"description":3110,"authors":3115,"heroImage":3111,"date":3116,"body":3117,"category":693,"tags":3118},[1599],"2024-05-01","GitLab is the most comprehensive AI-powered DevSecOps platform, enabling organizations to deliver more secure software faster with one platform for your entire software delivery lifecycle. 
GitHub provides an Advanced Security add-on, which enables additional security features within GitHub. However, it lacks the depth and breadth of security features provided natively by GitLab. Organizations looking to migrate to GitLab Ultimate to enhance their security across all areas of the SDLC can use this guide to compare the two offerings and as a tutorial to move to the GitLab platform.\n\nThis article includes:\n\n- [A comparison between GitLab Ultimate and GitHub Advanced Security](#a-comparison-between-gitlab-ultimate-and-github-advanced-security)\n- [How to migrate a GitHub repository to GitLab](#how-to-migrate-a-github-repository-to-gitlab)\n- [How to migrate from GitHub Advanced Security to GitLab Ultimate feature-by-feature](#how-to-migrate-feature-by-feature)\n- [An introduction to additional GitLab Ultimate's security features](#additional-gitlab-ultimate-security-features)\n\n## A comparison between GitLab Ultimate and GitHub Advanced Security\n\n[GitLab Ultimate](https://about.gitlab.com/pricing/ultimate/) is GitLab's top subscription tier for enterprises looking to deliver secure software faster. 
GitHub Advanced Security is an add-on to GitHub Enterprise, which enables additional security features.\n\n### Similarities between GitLab Ultimate and GitHub Advanced Security\n\nGitLab Ultimate and GitHub Advanced Security both provide:\n- Static Application Security Testing ([SAST](https://docs.gitlab.com/ee/user/application_security/sast/)), secret scanning, and dependency scanning\n- contextual vulnerability intelligence and resolution advice\n- a list of dependencies or software bill of materials ([SBOM](https://about.gitlab.com/blog/the-ultimate-guide-to-sboms/))\n- security metrics and insights\n\n### Differences between GitLab Ultimate and GitHub Advanced Security\n\nGitLab Ultimate differs from GitHub Advanced Security in the following ways:\n\n- GitLab natively provides additional code scanners such as container scanning, Dynamic Application Security Testing ([DAST](https://docs.gitlab.com/ee/user/application_security/dast/)), Web API fuzz testing, and more. These scanners are a mix of optimized proprietary and open source technologies with custom rulesets. For a full list, see the [GitLab AppSec documentation](https://docs.gitlab.com/ee/user/application_security/secure_your_application.html).\n- GitLab provides [granular security guardrails](https://docs.gitlab.com/ee/user/application_security/policies/) to prevent insecure code from being merged without approval.\n- GitLab security scanners can be run in [air-gapped or limited-connectivity environments](https://docs.gitlab.com/ee/user/application_security/offline_deployments/).\n- GitLab provides the [Compliance Center](https://docs.gitlab.com/ee/user/compliance/compliance_center/), which enables oversight of compliance violations across an entire organization.\n\nGitLab Ultimate also provides additional security and compliance capabilities, portfolio and value stream management, live upgrade assistance, and more. 
See the [GitLab Ultimate documentation](https://about.gitlab.com/pricing/ultimate/) to learn more about these additional features.\n\n## How to migrate a GitHub repository to GitLab\n\nGitLab provides a built-in importer, which allows you to import your GitHub projects from either GitHub.com or GitHub Enterprise to GitLab. The importer allows you to migrate not only the GitHub Repository to GitLab, but several other objects, including issues, collaborators (members), and pull requests. For a complete list of what can be migrated, see the [GitHub imported data documentation](https://docs.gitlab.com/ee/user/project/import/github.html#imported-data). You can perform the migration as follows:\n1. On the left sidebar, at the top, select **Create new (+)**.\n2. Select **New project/repository** under the **In GitLab** section.\n3. Select **Import project**.\n\n![Import project selection](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/1-Import-Project.png)\n\n4. Press the **GitHub** button.\n    - If using GitLab self-managed, then you must [enable the GitHub importer](https://docs.gitlab.com/ee/administration/settings/import_and_export_settings.html#configure-allowed-import-sources).\n    - Note that other importers can be initiated in the same way.\n5. Now, you can do one of the following:\n    - Authorize with GitHub Oauth by selecting **Authorize with GitHub**.\n    - Use a GitHub personal access token:\n       - Go to [https://github.com/settings/tokens/new](https://github.com/settings/tokens/new).\n       - In the **Note** field, enter a token description.\n       - Select the **repo** scope.\n       - Optionally, to import Collaborators, select the **read:org** scope.\n       - Press the **Generate token** button.\n       - On the GitLab import page, in the Personal Access Token field, paste the GitHub personal access token.\n6. Press the **Authenticate** button.\n7. Select the items you wish to migrate.\n8. 
Select the projects you wish to migrate and to where.\n9. Press the **Import** button.\n\nYour imported project should now be in your workspace. For additional guidance on migrating from GitHub to GitLab, watch this video:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/0Id5oMl1Kqs?si=HEpZVy94cpfPfAky\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\nYou can also perform the migration using a [GitHub personal access token](https://docs.gitlab.com/ee/user/project/import/github.html#use-a-github-personal-access-token) or the [GitLab REST API](https://docs.gitlab.com/ee/user/project/import/github.html#use-the-api). The importer also allows importing from other sources such as Bitbucket or Gitea. To learn more, read the [importer documentation](https://docs.gitlab.com/ee/user/project/import/).\n\n## How to migrate feature-by-feature\n\nLet’s go over how to leverage each feature provided by GitHub Advanced Security in GitLab Ultimate. You must have a [GitLab Ultimate license](https://about.gitlab.com/pricing/ultimate/) to continue. GitLab provides a [free trial](https://about.gitlab.com/free-trial/devsecops/) to get you started.\n\n### Code scanning\nGitHub provides code scanning to provide contextual vulnerability intelligence and advice for static source code. The same can be done within GitLab by enabling [SAST](https://docs.gitlab.com/ee/user/application_security/sast/). 
GitLab SAST scanners cover a wider set of programming languages and frameworks than GitHub’s [CodeQL](https://docs.github.com/en/code-security/code-scanning/introduction-to-code-scanning/about-code-scanning-with-codeql#about-codeql).\n\nTo enable code scanning in GitLab, you can simply add the [SAST template](https://docs.gitlab.com/ee/user/application_security/sast/#configure-sast-in-your-cicd-yaml) to your `.gitlab-ci.yml`:\n\n```yaml\ninclude:\n  - template: Jobs/SAST.gitlab-ci.yml\n```\n\nOnce the template has been added, any time new code is checked in, SAST will auto-detect the [programming languages](https://docs.gitlab.com/ee/user/application_security/sast/#supported-languages-and-frameworks ) used in your project. It will then scan the source code for known vulnerabilities.\n\n**Note:** Security scanners can also be added to your project using GitLab's [security configuration](https://docs.gitlab.com/ee/user/application_security/configuration/), which can automatically create a merge request to update your pipeline. To learn more, see the [Configure SAST by using the UI documentation](https://docs.gitlab.com/ee/user/application_security/sast/#configure-sast-by-using-the-ui).\n\nSAST results of the diff between the feature-branch and the target-branch display in the merge request widget. 
The merge request widget displays SAST results and resolutions that were introduced by the changes made in the merge request.\n\n![Security scanning in merge request](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/2-SAST-MR-View.png)\n\nEach vulnerability displays data to assist with remediation, including detailed description, severity, location, and resolution information:\n\n![SAST vulnerability details](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/3-SAST-MR-View-Detailed.png)\n\nYou can take action on these vulnerabilities:\n\n- **Dismiss vulnerability**: Allows a developer to dismiss the vulnerability with a comment. This assists the security team performing a review.\n- **Create issue**: Allows an issue to be created to keep track of a vulnerability that requires additional oversight.\n\nThese changes can also be seen inline when changing to the **Changes** view within the merge request.\n\n![SAST vulnerability changes view](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/4-SAST-MR-View-Changes.png)\n\n#### Customizing SAST scanners\n\nGitLab allows you to override a SAST job definition so you can change properties like variables, dependencies, or rules. You can do this by declaring a job with the same name as the SAST job to override. 
Then, place this new job after the template inclusion and specify any additional keys under it.\n\nFor example, the following configuration:\n- overwrites the version the `semgrep-sast` scanner uses\n- runs a script to fetch modules from private projects before running `gosec-sast`\n- configures all scanners to search at a maximum depth of 10\n\n```yaml\ninclude:\n  - template: Jobs/SAST.gitlab-ci.yml\n\nvariables:\n  SEARCH_MAX_DEPTH: 10\n\nsemgrep-sast:\n  variables:\n    SAST_ANALYZER_IMAGE_TAG: \"3.7\"\n\ngosec-sast:\n  before_script:\n    - |\n      cat \u003C\u003CEOF > ~/.netrc\n      machine gitlab.com\n      login $CI_DEPLOY_USER\n      password $CI_DEPLOY_PASSWORD\n      EOF\n```\n\n**Note:** The available SAST jobs can be found in the [`SAST.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml). Configurations can be found in the [Available SAST CI/CD variables documentation](https://docs.gitlab.com/ee/user/application_security/sast/#available-cicd-variables).\n\n#### Customizing SAST rulesets\n\nFor each SAST analyzer, GitLab processes the code then uses rules to find possible weaknesses in source code. These rules determine what types of weaknesses the scanner reports.\n\n- For Semgrep-based SAST scanners, GitLab creates, maintains, and supports the rules that are used. 
It combines the Semgrep open source engine, GitLab-managed detection rules, and GitLab proprietary technology for vulnerability tracking and false positive detection.\n- For other SAST analyzers, the rules are defined in the upstream projects for each scanner.\n\nYou can customize the behavior of the SAST scanners by defining a ruleset configuration file in the repository being scanned:\n- Disable predefined rules (available for all analyzers)\n- Override predefined rules (available for all analyzers)\n- Replace predefined rules by synthesizing a custom configuration using passthroughs\n\nFor more information and examples on configuring SAST rules, see the [SAST rules](https://docs.gitlab.com/ee/user/application_security/sast/rules.html) and [Customizing rulesets documentation](https://docs.gitlab.com/ee/user/application_security/sast/customize_rulesets.html).\n\n### Secret scanning\n\nGitHub provides secret scanning, which can find, block, and revoke leaked secrets. The same can be done within GitLab by enabling [Secret Detection](https://docs.gitlab.com/ee/user/application_security/secret_detection/).\n\nTo enable Secret Detection in GitLab, you can simply add the following template to your `.gitlab-ci.yml`:\n\n```yaml\ninclude:\n  - template: Jobs/Secret-Detection.gitlab-ci.yml\n```\n\nOnce the template has been added, any time new code is checked in (or a pipeline is run), the secret scanner will scan the source code for known secrets. Pipeline Secret Detection scans different aspects of your code, depending on the situation. For all methods except the “Default branch”, Pipeline Secret Detection scans commits, not the working tree. See the [Secret detection coverage documentation](https://docs.gitlab.com/ee/user/application_security/secret_detection/pipeline/#coverage) to learn more about how secret scanning works.\n\nWhen creating a merge request, Secret Detection scans every commit made on the source branch. 
Just like in SAST, each detected vulnerability provides the following information (such as location) and identifiers to assist with the remediation process:\n\n![Secret Detection vulnerability details](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/5-Secret-Detection-MR-Detailed.png)\n\nSimilar to SAST, you can take action on these vulnerabilities straight from the merge request, including dismissing vulnerabilities, and creating issues.\n\n#### Customizing Secret Detection jobs\n\nGitLab allows you to override a Secret Detection job definition so you change properties like variables, dependencies, or rules. You can do this by declaring a job with the same name as the Secret Detection job. Then place this new job after the template inclusion and specify any additional keys under it. For example, the following configuration:\n\n- overwrites the stage the secret detection job runs on to `security`\n- enables the historic scanning\n- changes the Secrets Analyzer version to 4.5\n\n```yaml\ninclude:\n  - template: Jobs/Secret-Detection.gitlab-ci.yml\n\nsecret_detection:\n  stage: security\n  variables:\n    SECRET_DETECTION_HISTORIC_SCAN: \"true\"\n    SECRETS_ANALYZER_VERSION: \"4.5\"\n```\n\n**Note:** The available Secret Detection jobs can be found in the [SAST.gitlab-ci.yml template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/Secret-Detection.gitlab-ci.yml). Available configurations can be found in the [Available Secret Detection CI/CD variables documentation](https://docs.gitlab.com/ee/user/application_security/secret_detection/pipeline/#customizing-analyzer-settings).\n\n#### Customizing Secret Detection rulesets\n\nThe Secret Detection analyzer allows you to customize which secrets are reported in the GitLab UI. 
The following customization options can be used separately, or in combination:\n\n- disable predefined rules\n- override predefined rules\n- synthesize a custom configuration\n- specify a remote configuration file\n\nFor example, by creating the file `.gitlab/secret-detection-ruleset.toml`, in the root directory of your project, the default GitLeaks package is extended to ignore test tokens from detection:\n\n```yaml\n### extended-gitleaks-config.toml\ntitle = \"extension of gitlab's default gitleaks config\"\n\n[extend]\n### Extends default packaged path\npath = \"/gitleaks.toml\"\n\n[allowlist]\n  description = \"allow list of test tokens to ignore in detection\"\n  regexTarget = \"match\"\n  regexes = [\n    '''glpat-1234567890abcdefghij''',\n  ]\n```\n\nFor more information on overriding the predefined analyzer rules, check out the [Secret Detection documentation](https://docs.gitlab.com/ee/user/application_security/secret_detection/pipeline/#override-predefined-analyzer-rules).\n\n#### Automatic response to leaked secrets\n\nGitLab Secret Detection automatically responds when it finds certain types of leaked secrets. Automatic responses can:\n- automatically revoke the secret\n- notify the partner that issued the secret and the partner can then revoke the secret, notify its owner, or otherwise protect against abuse\n\nGitLab can also notify partners when credentials they issue are leaked in public repositories on GitLab.com. If you operate a cloud or SaaS product and you’re interested in receiving these notifications, you can implement a Partner API, which is called by the GitLab Token Revocation API.\n\nSee the [Automatic response to leaked secrets documentation](https://docs.gitlab.com/ee/user/application_security/secret_detection/automatic_response.html) to learn more.\n\n### Supply chain security\n\nGitHub enables you to secure, manage, and report on software supply chains with automated security and version updates and one-click SBOMs. 
GitLab can meet your supply chain security needs using the Dependency Scanning and Dependency List (SBOM) features.\n\nTo enable Dependency Scanning in GitLab, you can simply add the following template to your `.gitlab-ci.yml`:\n\n```yaml\ninclude:\n  - template: Jobs/Dependency-Scanning.gitlab-ci.yml\n```\n\nOnce the template has been added, any time new code is checked in, Dependency Scanning will auto-detect the [package managers](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#supported-languages-and-package-managers) used in your project. It will then scan the dependencies used for known vulnerabilities.\n\nDependency Scanning results of the diff between the feature-branch and the target-branch display in the merge request widget. The merge request widget displays Dependency Scanning results and resolutions that were introduced by the changes made in the merge request. Within a merge request, each vulnerability displays relevant information to assist with remediation such as identifiers, evidence, and solutions:\n\n![Dependency Scanner vulnerability details](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/6-Dependency-Scanner-MR-View-Detailed.png)\n\nSimilar to SAST and Secret Detection, you can take action on these vulnerabilities straight from the merge request, including dismissing vulnerabilities and creating issues.\n\n#### Configuring Dependency Scanning\n\nTo override a job definition (for example, to change properties like variables or dependencies), declare a new job with the same name as the one to override. Place this new job after the template inclusion and specify any additional keys under it. 
For example, the following code:\n\n- disables automatic remediation of vulnerable dependencies\n- requires a build job to complete before Dependency Scanning\n\n```yaml\ninclude:\n  - template: Jobs/Dependency-Scanning.gitlab-ci.yml\n\ngemnasium-dependency_scanning:\n  variables:\n    DS_REMEDIATE: \"false\"\n  dependencies: [\"build\"]\n```\n\nTo learn more about configuring the dependency scanners, see the [Customizing analyzer behavior documentation](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-analyzer-behavior).\n\n#### Generating an SBOM\n\nGitLab provides a Dependency List (SBOM) to review your project or group dependencies and key details about those dependencies, including their known vulnerabilities. This list is a collection of dependencies in your project, including existing and new findings. The Dependency List is generated after the dependency scanner runs successfully on the [default branch](https://docs.gitlab.com/ee/user/project/repository/branches/default.html). To access the Dependency List:\n\n1. On the left sidebar, select **Search or go to** and find your project.\n2. Select **Secure > Dependency List**.\n\n![Dependency list (SBOM)](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/7-Dependency-List.png)\n\nFrom here you can see the following information on your dependencies:\n\n| Field\t| Description |\n| ----- | ----------- |\n| Component\t| The dependency’s name and version. |\n| Packager | The packager used to install the dependency. |\n| Location | For system dependencies, this lists the image that was scanned. For application dependencies, this shows a link to the packager-specific lock file in your project that declared the dependency. It also shows the dependency path to a top-level dependency, if any, and if supported. |\n| License | Links to dependency’s software licenses. 
A warning badge that includes the number of vulnerabilities detected in the dependency. |\n| Projects | Links to the project with the dependency. If multiple projects have the same dependency, the total number of these projects is shown. To go to a project with this dependency, select the Project's number, then search for and select its name. The project search feature is supported only on groups that have up to 600 occurrences in their group hierarchy. |\n\n\u003Cp>\u003C/p>\n\nSee the [Dependency List documentation](https://docs.gitlab.com/ee/user/application_security/dependency_list/) to learn more.\n\n### Security and compliance administration\n\nGitHub Advanced Security allows you to view security metrics and insights and assess code security risk. Now let’s examine how to do the same with GitLab Ultimate.\n\n#### Viewing security metrics and insights\n\nGitLab provides [Security dashboards](https://docs.gitlab.com/ee/user/application_security/security_dashboard/) to help assess the security posture of your applications. These dashboards display a collection of metrics, ratings, and charts for the vulnerabilities detected by the security scanners run on your project:\n\n- vulnerability trends over a 30-, 60-, or 90-day timeframe for all projects in a group\n- a letter grade rating for each project based on vulnerability severity\n- the total number of vulnerabilities detected within the past 365 days, including their severity\n\nTo access the Security dashboard:\n\n1. On the left sidebar, select **Search or go to** and find your project or group.\n2. From the side tab, select **Secure > Security** dashboard.\n3. 
Filter and search for what you need.\n\nThe group view displays your security posture for all projects in your group:\n\n![Group Security dashboard](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/8-SD-Group.png)\n\nThe project view displays your security posture for just the project:\n\n![Project Security dashboard](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/9-SD-Project.png)\n\n#### Assess code security risk\n\nGitLab Ultimate features a [Vulnerability Report](https://docs.gitlab.com/ee/user/application_security/vulnerability_report/), which provides information about vulnerabilities from scans of the default branch. It contains cumulative results of all successful jobs, regardless of whether the pipeline was successful. At all levels, the Vulnerability Report contains:\n\n- totals of vulnerabilities per severity level\n- filters for common vulnerability attributes\n- details of each vulnerability, presented in tabular layout\n\n![Vulnerability Report](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/10-Vulnerability-Report.png)\n\nClicking on a vulnerability enables access to its [Vulnerability Page](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/), which contains details of the vulnerability including a description, location, identifiers, and more. 
Below is an example of the Vulnerability Page for an SQL Injection vulnerability detected by our SAST scanner:\n\n![SQL Injection Vulnerability Page](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/11-Vulnerability-Page-1.png)\n\nFrom here the security team can collaborate by [changing the status of a vulnerability](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/#change-the-status-of-a-vulnerability) along with a reason and [creating issues to better track changes](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/#create-a-gitlab-issue-for-a-vulnerability).\n\nFrom the Vulnerability Page, you can also leverage [GitLab Duo](https://about.gitlab.com/gitlab-duo/), our AI-powered suite of features, to explain the vulnerability and [automatically create a merge request that resolves the vulnerability](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/#vulnerability-resolution).\nGitLab Duo's [Vulnerability Explanation](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/#vulnerability-explanation) uses a large language model to:\n\n- summarize the vulnerability.\n- help developers and security analysts to understand the vulnerability, how it could be exploited, and how to fix it\n- provide a suggested mitigation\n\n![SQL Injection GitLab Duo AI explanation](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/13-Explain-Vulnerability.png)\n\n## Additional GitLab Ultimate security features\n\nGitLab Ultimate contains many more security features that cannot be found within GitHub Advanced Security. 
A few examples of these additional security features are: additional security scanners for the complete software development lifecycle (SDLC), granular security guardrails, and custom permissions.\n\n### Security scanners for the entire SDLC\n\nOur portfolio of security scanners spans the SDLC.\n\n| Scanner Name | Scans | Languages/Files scanned |\n|  -------------- | ----- | ------------------------- |\n| [Static Application Security Testing (SAST)](https://docs.gitlab.com/ee/user/application_security/sast/) | Static source code | C/C++, Java, Python, Go, JavaScript, C#, and more |\n| [Dynamic Application Security Testing (DAST)](https://docs.gitlab.com/ee/user/application_security/dast/) | Running web application, live API | Language-agnostic |\n| [Infrastructure as Code (IaC) Scanning](https://docs.gitlab.com/ee/user/application_security/iac_scanning/) | IaC files |Terraform, AWS Cloud Formation, Ansible, and more |\n| [Container Scanning](https://docs.gitlab.com/ee/user/application_security/container_scanning/) | Static and running container images | Dockerfile |\n| [Dependency Scanning and License Scanning](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/) | Application dependencies | Requirements.txt, Yarn, Gradle, Npm, and more |\n| [Web API Fuzz Testing](https://docs.gitlab.com/ee/user/application_security/api_fuzzing/) | Sends random/malformed data to web-api | OpenAPI, GraphQL, HAR, Postman Collection |\n| [Coverage-guided Fuzz Testing](https://docs.gitlab.com/ee/user/application_security/coverage_fuzzing/) | Sends random/malformed data to function | C/C++, Go, Swift, Python, Rust, Java, JavaScript, AFL |\n\n\u003Cp>\u003C/p>\n\nGitLab also allows you to integrate [third-party scanners](https://about.gitlab.com/blog/integrate-external-security-scanners-into-your-devsecops-workflow/) and [custom scanners](https://about.gitlab.com/blog/how-to-integrate-custom-security-scanners-into-gitlab/) into the platform. 
Once integrated, the scanner results are automatically presented in various places in GitLab, such as the Pipeline view, merge request widget, and Security dashboard. See the [Security Scanner Integration documentation](https://docs.gitlab.com/ee/development/integrations/secure.html) to learn more.\n\n### Granular security and compliance policies\n\nPolicies in GitLab provide security and compliance teams with [a way to enforce controls globally in their organization](https://about.gitlab.com/blog/meet-regulatory-standards-with-gitlab/). Security teams can ensure:\n\n- security scanners are enforced in development team pipelines with proper configuration\n- all scan jobs execute without any changes or alterations\n- proper approvals are provided on merge requests based on results from those findings\n\n![Merge Request Security Policies](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/14-MR-Policy.png)\n\nCompliance teams can centrally enforce multiple approvers on all merge requests and ensure various settings are enabled on projects in scope of organizational requirements, such as enabling or locking merge request and repository settings. To learn more see the [GitLab Security Policy](https://docs.gitlab.com/ee/user/application_security/policies/) documentation.\n\n### Custom roles and granular permissions\n\n[GitLab Ultimate provides custom roles](https://about.gitlab.com/blog/how-to-tailor-gitlab-access-with-custom-roles/), which allow an organization to create user roles with the precise privileges and permissions required for that organization’s needs.\n\nFor example, a user could create a “Security Auditor” role with permissions to view security vulnerabilities in the system, but not be able to view source code, nor perform any changes within the repository. 
This granular set of permissions enables well-defined separation of duties.\n\n![Custom role creation](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/15-Custom-Roles.png)\n\nTo learn more see the [Custom Roles](https://docs.gitlab.com/ee/user/custom_roles.html) and [available Granular Permissions](https://docs.gitlab.com/ee/user/custom_roles/abilities.html) documentation.\n\n### Compliance Center\n\nThe Compliance Center is the central location for compliance teams to manage their compliance standards’ adherence reporting, violations reporting, and compliance frameworks for their group. The Compliance Center includes the following:\n\n- [Compliance standards adherence dashboard](https://docs.gitlab.com/ee/user/compliance/compliance_center/compliance_standards_adherence_dashboard.html) lists the adherence status of projects complying to the GitLab standard.\n- [Compliance violations report](https://docs.gitlab.com/ee/user/compliance/compliance_center/compliance_violations_report.html) shows a high-level view of merge request activity for all projects in the group.\n- [Compliance frameworks report](https://docs.gitlab.com/ee/user/compliance/compliance_center/compliance_frameworks_report.html) shows all the compliance frameworks in a group.\n- [Compliance projects report](https://docs.gitlab.com/ee/user/compliance/compliance_center/compliance_projects_report.html) shows the compliance frameworks that are applied to projects in a group.\n\n![Compliance Center](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674404/Blog/Content%20Images/16-Compliance-Center.png)\n\nThese dashboards assist with making sure separation of duties is being followed to optimize compliance within your organization. 
To learn more see the [Compliance Center documentation](https://docs.gitlab.com/ee/user/compliance/compliance_center/).\n\n## Read more\n\nThis article covers only a portion of the wide range of security features GitLab Ultimate offers. Check out these resources to learn more about how GitLab Ultimate can help enhance your organizational security and developer efficiency:\n\n- [Why GitLab Ultimate](https://about.gitlab.com/pricing/ultimate/)\n- [Getting Started with DevSecOps Tutorial](https://gitlab-da.gitlab.io/tutorials/security-and-governance/devsecops/simply-vulnerable-notes/)\n- [Getting Started with DevSecOps Sample Project](https://gitlab.com/gitlab-da/tutorials/security-and-governance/devsecops/simply-vulnerable-notes)\n- [Import your project from GitHub to GitLab documentation](https://docs.gitlab.com/ee/user/project/import/github.html)\n- [Migrating from GitHub Actions documentation](https://docs.gitlab.com/ee/ci/migration/github_actions.html)\n- [Tutorial: Create and run your first GitLab CI/CD pipeline](https://docs.gitlab.com/ee/ci/quick_start/)\n- [Tutorial: Create a complex pipeline](https://docs.gitlab.com/ee/ci/quick_start/tutorial.html)\n- [CI/CD YAML syntax reference](https://docs.gitlab.com/ee/ci/yaml/)",[719,2493,693,9,972],{"slug":3120,"featured":91,"template":698},"migration-guide-github-advanced-security-to-gitlab-ultimate","content:en-us:blog:migration-guide-github-advanced-security-to-gitlab-ultimate.yml","Migration Guide Github Advanced Security To Gitlab 
Ultimate","en-us/blog/migration-guide-github-advanced-security-to-gitlab-ultimate.yml","en-us/blog/migration-guide-github-advanced-security-to-gitlab-ultimate",{"_path":3126,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3127,"content":3133,"config":3139,"_id":3141,"_type":14,"title":3142,"_source":16,"_file":3143,"_stem":3144,"_extension":19},"/en-us/blog/modern-software-development-problems-require-modern-ai-powered-devsecops",{"title":3128,"description":3129,"ogTitle":3128,"ogDescription":3129,"noIndex":6,"ogImage":3130,"ogUrl":3131,"ogSiteName":685,"ogType":686,"canonicalUrls":3131,"schema":3132},"Modern software development problems require modern AI-powered DevSecOps","Learn how applying AI can help solve common development challenges.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662840/Blog/Hero%20Images/ai-experiment-stars.png","https://about.gitlab.com/blog/modern-software-development-problems-require-modern-ai-powered-devsecops","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Modern software development problems require modern AI-powered DevSecOps\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"William Arias\"}],\n        \"datePublished\": \"2023-09-07\"\n      }",{"title":3128,"description":3129,"authors":3134,"heroImage":3130,"date":3136,"body":3137,"category":801,"tags":3138},[3135],"William Arias","2023-09-07","\nThe landscape of software development is already being impacted by the integration of AI tools. GitLab is making that impact a positive one. Infusing AI throughout the software development lifecycle lowers the barrier for everyone to contribute. 
Let's take a look how organizations can evolve the idea _\"every company is a software company\"_ into _\"every company should be an AI-first company\"_ and how this transformation can happen by embracing AI-powered DevSecOps.\n\nAn effective way to demonstrate how AI can be a game-changer for software development teams is to walk through a detailed scenario. In this example, you'll see how applying [GitLab Duo](https://about.gitlab.com/gitlab-duo/), the suite of AI capabilities powering workflows, to a dilemma that many organizations face – how to automate complex customer-facing tools – can improve the experience for DevSecOps teams and customers alike.\n\n## Challenge: Keeping customer Q&A systems from going stale\nImagine a large financial services firm has an extensive FAQ section on its website but keeping that information up to date and easy to navigate has become increasingly difficult. Also, the FAQs don't cover queries that are less frequent but should be easy to respond to.\n\n![FAQ](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/faq-improved.gif)\n\nTo resolve this, the company proposed building an AI-enabled Q&A system that not only answers common queries but also other finance-related questions. The process to build the AI-enabled system described above can look like this:\n\n![process](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/process-demo.png)\n\n### Problem definition and collaboration\nThe journey begins with a GitLab issue. This is where the team collaborates, discusses ideas, and brainstorms to define the problem and scope of work. Collaborating on solving the Q&A system problem results in a lengthy issue thread where it can be challenging to quickly understand the status of the work. 
Anyone new to the issue or even current participants may need to spend considerable time reading comments to get an overview of the problem status and the decisions that have been made. AI-driven text summarization simplifies understanding of the comments, allowing developers and business teams to align on the problem and requirements.\n\n![AI-driven text summary](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/summary-hd.png)\n\nText summarization applied to issue comments \n{: .note.text-center}\n\nGitLab provides an AI-generated summary right in the issue where all the collaboration took place, thus helping development teams to get an overview of the main points and takeaways. The AI-generated summary enables developers and other team members to quickly extract and review key points from the issue without having to read through long conversations.\n\n### Solution development\nThe proposed solution involves leveraging a large language model (LLM) to build a Q&A system. The AI-generated summary allows developers to quickly break down the requirements and store them in GitLab using [Requirements management](https://docs.gitlab.com/ee/user/project/requirements/).  The requirements are stored as easy-to-understand items with their own criteria of success and expected behavior as depicted in the figure below:\n\n![requirements](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/requirements.png)\n\nAI Q&A system requirements in GitLab \n{: .note.text-center}\n\nHaving clear requirements is key to starting implementation of a successful solution, creating a [merge request](https://docs.gitlab.com/ee/user/project/merge_requests/), and iterating on the solution to the problem.\n\nWhile working towards a solution, GitLab Duo Code Suggestions can help developers focus. Leveraging generative AI, Code Suggestions provides relevant suggestions reducing time spent on repetitive or common code. 
This frees up developers to focus their efforts on complex problems that require deeper understanding of the codebase and business logic. \n\n![code-suggestions](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/code-suggestion.png)\n\nGenerated code speeds up development\n{: .note.text-center}\n\nDevelopers get code suggestions inline in their integrated development environment, or IDE, as they type or after prompting the model with code comments. The suggestions are predicted based on the code written before the cursor and the file extension of the script.\n\n### Efficient code review\nLet's consider that, until this point, developers working on the AI-enabled Q&A system have committed code contributions to build the solution. And it is also evident that infusing AI into the software development lifecycle has allowed the team to move faster. To move faster without compromising quality, however, it’s important to carry out code reviews on all the contributions done so far by the development team. Code reviews help developers share knowledge and maintain high-quality software. Companies like the one in this scenario might have a large number of developers, and identifying suitable reviewers can be challenging and time consuming. To resolve this, the company can leverage [GitLab Duo Suggested Reviewers](https://about.gitlab.com/gitlab-duo/). This feature employs machine learning to suggest the most relevant code reviewers, streamlining and speeding up the review process. \n\n![suggested-reviewers](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/suggested-review-sa.png)\n\nSuggested reviewers appear in the reviewer dropdown\n{: .note.text-center}\n\nThe machine learning algorithm that powers Suggested Reviewers learns from the project’s contribution graph data; the suggestions it generates are contextual to the changes made in the code.   
\n\n### Code review summarization and test generation\nAI-driven text summarization helps in understanding suggested code changes derived from the code review at a glance. In our scenario, once the appropriate reviewer has been chosen, this person can go through the code and submit comments, propose changes, or identify potential edge cases that may have been overlooked. Getting a quick summary of all the suggested code changes before diving into the details can potentially speed up software delivery.\n\n![mr-summary](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/MR-Summary.png)\n\nCode Review summary\n{: .note.text-center}\n\nAs part of the review, it is important to make sure that all the functions have been tested. If there was the case of an overlooked test, once again it is possible to leverage generative AI within GitLab to create [test cases](https://docs.gitlab.com/ee/ci/test_cases/).\n\n![suggest-test-cases](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/suggest-test-cases.png)\n\nTest cases generated by AI\n{: .note.text-center}\n\nIn the figure below, we see the test code generated by AI. This provides helpful assertions that can also serve as an inspiration for discovering edge cases that might have not been considered before.\n\n![test cases](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/test-cases-created.png)\n\nSuggested test cases\n{: .note.text-center}\n\nThis solidifies the quality of the project codebase, closing the loop of efficient code reviews.\n\n### Minimizing context switching\nAllowing developers to obtain guidance and answers to GitLab-related questions without leaving the platform can be achieved by interacting with GitLab Duo Chat functionality. This AI-powered feature reduces context switching and enables developers or other team members to ask open questions and request information using natural language. 
Let's assume that a developer wasn't exactly sure how to leverage AI to generate test cases like the ones requested during the code review. Using GitLab Duo Chat, the developer can ask how to achieve exactly that without having to leave the platform, as depicted in the figure below:\n\n![GitLab Duo Chat](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/chat.png)\n\nGitLab Duo Chat\n{: .note.text-center}\n\nInteracting with the platform using natural language provides a convenient way to achieve results and get things done while minimizing context switching, improving the software delivery experience and speed. Now, speaking of speed, let's dive into the realm of continuous integration/continuous delivery and deployment (CI/CD).\n\n### GitLab CI/CD \nThe AI-infused DevSecOps approach described so far aligns and enhances CI/CD methodologies. The financial services company in this scenario uses these continuous methodologies to deliver software faster since these are proven ways to automatically build, test, secure, and deploy software.\n\n![chatbot-pipeline](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/pipelines.png)\n\nGitLab CI/CD pipeline\n{: .note.text-center}\n\nThe figure above depicts the CI/CD pipeline that is triggered and used in the development of our LLM Financial Q&A system. 
The jobs it executes are:\n\n- Build: creates the container with its respective code and dependencies\n- Test: unit testing and security scanners, which help ensure that no code vulnerabilities are being introduced \n- Deploy: puts the solution in the hands of end users\n\nOnce the solution is deployed, end users can ask questions to it, and the FAQ problem can be considered solved.\n\n![chatbot](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/chatbot.gif)\n\nThe AI touchpoints explored in this scenario and that led to the faster development of our AI-powered Q&A system are seamlessly integrated within DevSecOps methodologies, enhancing the development and operations experience.\n\n![devsecops-ml](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/devsecops-embedded.png)\n\nUltimately, this increases the likelihood of a successful and secure deployment of the AI-powered Q&A system.\n\n### Recap\nGoing back to the process overview, in the following table we see how the different AI touchpoints are used throughout the software development lifecycle.\n\n|  | What AI-powered DevSecOps can do for you | Feature |\n| ------ | ------ | ------ |\n|![brainstorming](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/brainstorm.png) | AI generates a summary of the discussions and provides the most relevant information for your team to move forward in the solution development   |[Summarize issues](https://docs.gitlab.com/ee/user/ai_features.html#summarize-issue-discussions)       |\n|![requirements](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/solution-requirements.png)       | The team agrees on a chatbot and breaks down the application requirements and sets criteria it must fulfill       |  [Requirements management](https://docs.gitlab.com/ee/user/project/requirements/)       
|\n|![develop](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/develop.png)        | Developers leverage Code Suggestions to speed up code and generate tests       |   [Code Suggestions](https://docs.gitlab.com/ee/user/project/repository/code_suggestions.html)     |\n|![reviewer](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/suggested-reviewer.png)        | Quickly find the most suitable person to review code changes and move on in the delivery process       |  [Suggested Reviewers](https://docs.gitlab.com/ee/user/project/merge_requests/reviews/data_usage.html#:~:text=Suggested%20Reviewers%20is%20the%20first,and%20GitLab%20user%20account%20metadata.)      |\n|![code-review](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/review.png)        |  Summarize Merge Request, which includes code summary, helps contributors understand the most important changes that need to be addressed      | [Summarize Merge Request](https://docs.gitlab.com/ee/user/ai_features.html#summarize-merge-request-changes)      |\n|![pipeline](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/buildtest.png)        | CI/CD and integrated security       |   [CI/CD testing](https://docs.gitlab.com/ee/ci/testing/)    |\n|![deploy](https://about.gitlab.com/images/blogimages/2023-07-24-modern-problems-modern-devsecops/deploy.png)        | Kubernetes agent, and multi-cloud deployments      |  [Kubernetes integration](https://docs.gitlab.com/ee/user/clusters/agent/)      |\n\nWatch the video walkthrough of the financial services scenario:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/LifJdU3Qagw?si=fpfvuhJQ_jYmYrT2\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\nAs organizations embark on their AI journey, more and better 
software will have to be created. Drawing insights from DevSecOps combined with AI-powered workflows provides a solid foundation to enjoy the benefits of AI, unlock new use cases, and provide robust solutions that learn from data. Ultimately, AI enables organizations to enhance software development practices and to tackle modern problems with modern solutions.\n\nGet started with GitLab Duo today with this [free trial of GitLab Ultimate](https://gitlab.com/-/trials/new?glm_content=default-saas-trial&glm_source=localhost%2Fsolutions%2Fai%2F).\n",[951,9,803],{"slug":3140,"featured":6,"template":698},"modern-software-development-problems-require-modern-ai-powered-devsecops","content:en-us:blog:modern-software-development-problems-require-modern-ai-powered-devsecops.yml","Modern Software Development Problems Require Modern Ai Powered Devsecops","en-us/blog/modern-software-development-problems-require-modern-ai-powered-devsecops.yml","en-us/blog/modern-software-development-problems-require-modern-ai-powered-devsecops",{"_path":3146,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3147,"content":3153,"config":3158,"_id":3160,"_type":14,"title":3161,"_source":16,"_file":3162,"_stem":3163,"_extension":19},"/en-us/blog/modernizing-a-simple-c-application-to-java-with-gitlab-duo",{"title":3148,"description":3149,"ogTitle":3148,"ogDescription":3149,"noIndex":6,"ogImage":3150,"ogUrl":3151,"ogSiteName":685,"ogType":686,"canonicalUrls":3151,"schema":3152},"Modernizing a simple C++ application to Java with GitLab Duo","Learn how to refactor code from memory unsafe languages to memory safe languages with the help of GitLab's AI capabilities, saving time and effort on application modernization.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749659507/Blog/Hero%20Images/AdobeStock_623844718.jpg","https://about.gitlab.com/blog/modernizing-a-simple-c-application-to-java-with-gitlab-duo","\n                        {\n        \"@context\": \"https://schema.org\",\n      
  \"@type\": \"Article\",\n        \"headline\": \"Modernizing a simple C++ application to Java with GitLab Duo\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Cesar Saavedra\"}],\n        \"datePublished\": \"2024-12-11\"\n      }",{"title":3148,"description":3149,"authors":3154,"heroImage":3150,"date":3155,"body":3156,"category":801,"tags":3157},[798],"2024-12-11","Memory unsafe languages are those that do not handle any memory management\non behalf of the developer. For example, when programming in C or C++, if\nyou need memory during runtime, you will need to allocate and deallocate the\nmemory yourself, running the risk of ending up with memory leaks in cases\nwhen you inadvertently forget to deallocate it. Other languages like Ada and\nFORTRAN provide some memory management but may not prevent memory leaks.\nMany organizations, including those in the public sector, have applications\nthat have been developed using languages that are memory unsafe and are\noften looking to modernize these to a memory safe language, such as Java,\nPython, JavaScript, or Golang.\n\n\nThis tutorial focuses on a specific example of modernizing a simple C++\napplication to Java by refactoring it with the help of [GitLab\nDuo](https://about.gitlab.com/gitlab-duo/), our suite of AI capabilities,\nand shows how much time and effort you can save in the migration.\n\n\n## Understanding the simple C++ application\n\n\nLet’s make the assumption that we have been tasked with the migration of a\nC++ application to a memory safe language, namely Java. 
The C++ application\ncan be found in the following project (thank you to\n[@sugaroverflow](https://gitlab.com/sugaroverflow) for contributing this\nsample application):\n\n\n[https://gitlab.com/gitlab-da/use-cases/ai/ai-applications/refactor-to-java/air-quality-application](https://gitlab.com/gitlab-da/use-cases/ai/ai-applications/refactor-to-java/air-quality-application)\n\n\nSince this is the first time we are seeing this application, let’s invoke\nGitLab Duo Code explanation to better understand what it does. We open file\n`main.cpp` in Visual Studio Code and select the entirety of this file. We\nthen right-click and select **GitLab Duo Chat > Explain selected snippet**\nfrom the popup menu.\n\n\n![duo-code-explanation-menu-option](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675546/Blog/Content%20Images/code-explanation-menu-option.png)\n\n\nThe GitLab Duo Chat window opens up and the slash command `/explain` is\nexecuted for the selected code. Chat returns a very thorough and detailed\ndescription and explanation in natural language form of what each function\ndoes in the file as well as examples on how to run the compiled program.\n\n\n![code-explanation-text](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675547/Blog/Content%20Images/code-explanation-text.png)\n\n\nIn short, the simple C++ application takes a U.S. zip code as input and\nreturns the air quality index for that zip code.\n\n\n## Compiling and running the C++ application\n\n\nTo further understand this simple C++ application, we proceed to compile and\nrun it. 
We could have asked Chat how to do this, however, the project has a\nREADME file that provides the commands to compile the project, so we go\nahead and use those by entering them in the Terminal window of VS Code.\n\n\n![compile-command](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675547/Blog/Content%20Images/compile-command.png)\n\n\nAfter the compilation finishes, we change directory to the `build`\nsubdirectory in the project, which is where the compilation process places\nthe executable file for this application. Then, we run the executable by\nentering the following command:\n\n\n`./air_quality_app 32836`\n\n\nAnd we see the response as follows:\n\n\n`Air Quality Index (AQI) for Zip Code 32836: 2 (Fair)`\n\n\n![cplus-plus-app-execution-output](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675547/Blog/Content%20Images/cplus-plus-app-execution-output.png)\n\n\nThis confirms to us that the application was successfully compiled and it’s\nexecuting appropriately.\n\n\n## Refactoring the application to Java\n\n\nLet’s start migrating this C++ application to Java. We take advantage of\nGitLab Duo Chat and its refactoring capabilities by using the slash command\n`/refactor`. We qualify the slash command with specific instructions on what\nto do for the refactoring. We enter the following command in the Chat input\nfield:\n\n\n> /refactor this entire application to Java. Provide its associated pom.xml\nto build and run the Java application. Also, provide the directory structure\nshowing where all the resulting files should reside for the Java\napplication.\n\n\n![refactor-chat-output](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675547/Blog/Content%20Images/refactor-chat-output.png)\n\n\nChat returns a set of Java files that basically refactor the entire C++\napplication to the memory safe language. 
In addition and per the prompt,\nChat returns the pom.xml file, needed by\n[maven](https://docs.gitlab.com/ee/api/packages/maven.html) for the building\nand execution of the refactored application as well as its directory\nstructure, indicating where each generated file should reside.\n\n\nWe copy and save all the generated files to our local directory.\n\n\n## Creating the Java project\n\n\nIn VS Code, we now proceed to open an empty project in which we will set up\nthe directory structure of the new Java application and its contents.\n\n\nWe create all the previously generated Java files in their corresponding\ndirectories in the new project and paste their contents in each.\n\n\n![java-files-created](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675547/Blog/Content%20Images/java-files-created.png)\n\n\nLastly, we save all the files to our local disk.\n\n\n## Asking for help to build and run the Java application\n\n\nAt this point, we have an entire Java application that has been refactored\nfrom C++. Now, we need to build it but we don’t quite remember what maven\ncommand we need to use to accomplish this.\n\n\nSo we ask GitLab Duo Chat about this. 
We enter the following prompt in the\nChat input field:\n\n\n> How do you build and run this application using maven?\n\n\n![maven-info-output](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675547/Blog/Content%20Images/maven-info-output.png)\n\n\nChat returns with a thorough explanation on how to do this, including\nexamples of the maven command to build and run the newly created Java\napplication.\n\n\n## Building and running the Java application\n\n\nGitLab Duo Chat understands the application and environment context and\nresponds that we first need to create an environment variable called\n`API_KEY` before we can run the application.\n\n\nIt also provides the maven command to execute to build the application,\nwhich we enter in the Terminal window:\n\n\n```unset\n\nmvn clean package\n\n``` \n\n\n![java-build-output](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675547/Blog/Content%20Images/java-build-output.png)\n\n\nOnce the build finishes successfully, we copy the generated command to run\nthe application from the Chat window and paste it in the Terminal window:\n\n\n```unset\n\njava -jar target/air-quality-checker-1.0-SNAPSHOT-jar-with-dependencies.jar\n90210\n\n```\n\n\n![java-app-execution-output](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675547/Blog/Content%20Images/java-app-execution-output.png)\n\n\nThe application successfully executes and returns the string:\n\n\n```unset\n\nAir Quality Index (AQI) for Zip Code 90210: 2 (Fair)\n\n```\n\n\nWe have confirmed that the modernized version of the application, now\nrefactored in Java, runs just like its original C++ version.\n\n\n## Watch this tutorial in action\n\n\nWe have seen that by leveraging the power of GitLab Duo in your\nmodernization activities, you can save a great deal of time and effort,\nfreeing you to spend more time innovating and creating value to your\norganization.\n\n\nHere is a video to show you, in action, the tutorial you just 
read:\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/LJ7GOr_P0xs?si=_ZjF75DAXEQnY2Mn\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n> #### Want to get started with GitLab Duo? [Start a free trial\ntoday!](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/)\n\n\n## Learn more\n\n- [Refactor code into modern languages with AI-powered GitLab\nDuo](https://about.gitlab.com/blog/refactor-code-into-modern-languages-with-ai-powered-gitlab-duo/)\n\n- [Secure by Design principles meet DevSecOps innovation in GitLab\n17](https://about.gitlab.com/blog/secure-by-design-principles-meet-devsecops-innovation-in-gitlab-17/)\n\n- [How to secure memory-safe vs. manually managed\nlanguages](https://about.gitlab.com/blog/memory-safe-vs-unsafe/)\n",[803,9,719,186],{"slug":3159,"featured":6,"template":698},"modernizing-a-simple-c-application-to-java-with-gitlab-duo","content:en-us:blog:modernizing-a-simple-c-application-to-java-with-gitlab-duo.yml","Modernizing A Simple C Application To Java With Gitlab Duo","en-us/blog/modernizing-a-simple-c-application-to-java-with-gitlab-duo.yml","en-us/blog/modernizing-a-simple-c-application-to-java-with-gitlab-duo",{"_path":3165,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3166,"content":3172,"config":3178,"_id":3180,"_type":14,"title":3181,"_source":16,"_file":3182,"_stem":3183,"_extension":19},"/en-us/blog/monitor-application-performance-with-distributed-tracing",{"title":3167,"description":3168,"ogTitle":3167,"ogDescription":3168,"noIndex":6,"ogImage":3169,"ogUrl":3170,"ogSiteName":685,"ogType":686,"canonicalUrls":3170,"schema":3171},"Monitor application performance with Distributed Tracing","Learn how Distributed Tracing helps troubleshoot application performance issues by providing end-to-end visibility and seamless collaboration across your 
organization.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098000/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%288%29_5x6kH5vwjz8cwKgSBh1w11_1750098000511.png","https://about.gitlab.com/blog/monitor-application-performance-with-distributed-tracing","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Monitor application performance with Distributed Tracing\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Sacha Guyon\"}],\n        \"datePublished\": \"2024-06-13\"\n      }",{"title":3167,"description":3168,"authors":3173,"heroImage":3169,"date":3175,"body":3176,"category":717,"tags":3177},[3174],"Sacha Guyon","2024-06-13","Downtime due to application defects or performance issues can have devastating financial consequences for businesses. An hour of downtime is estimated to cost firms $301,000 or more, according to [Information Technology Intelligence Consulting's 2022 Global Server Hardware and Server OS Reliability Survey](https://itic-corp.com/server-and-application-by-the-numbers-understanding-the-nines/). These issues often originate from human-introduced changes, such as code or configuration changes.\n\nResolving such incidents requires development and operations teams to collaborate closely, investigating the various components of the system to find the root cause change, and promptly restore the system back to normal operation. However, these teams commonly use separate tools to build, manage, and monitor their application services and infrastructure. This approach leads to siloed data, fragmented communication, and inefficient context switching, increasing the time spent to detect and resolve incidents.\n\nGitLab aims to address this challenge by combining software delivery and monitoring functionalities within the same platform. 
Last year, we released [Error Tracking](https://docs.gitlab.com/ee/operations/error_tracking.html) as a general availability feature in [GitLab 16.0](https://about.gitlab.com/releases/2023/05/22/gitlab-16-0-released/#error-tracking-is-now-generally-available). Now, we're excited to announce the [Beta release of Distributed Tracing](https://docs.gitlab.com/ee/operations/tracing), the next step toward a comprehensive observability offering seamlessly integrated into the GitLab DevSecOps platform.\n\n## A new era of efficiency: GitLab Observability\n\nGitLab Observability empowers development and operations teams to visualize and analyze errors, traces, logs, and metrics from their applications and infrastructure. By integrating application performance monitoring into existing software delivery workflows, context switching is minimized and productivity is increased, keeping teams focused and collaborative on a unified platform.\n\nAdditionally, GitLab Observability bridges the gap between development and operations by providing insights into application performance in production. This enhances transparency, information sharing, and communication between teams. Consequently, they can detect and resolve bugs and performance issues arising from new code or configuration changes sooner and more effectively, preventing those issues from escalating into major incidents that could negatively impact the business.\n\n## What is Distributed Tracing?\n\nWith Distributed Tracing, engineers can identify the source of application performance issues. A trace represents a single user request that moves through different services and systems. Engineers are able to analyze the timing of each operation and any errors as they occur.\n\nEach trace is composed of one or more spans, which represent individual operations or units of work. Spans contain metadata like the name, timestamps, status, and relevant tags or logs. 
By examining the relationships between spans, developers can understand the request flow, identify performance bottlenecks, and pinpoint issues.\n\nDistributed Tracing is especially valuable for [microservices architecture](https://about.gitlab.com/topics/microservices/), where a single request may involve numerous service calls across a complex system. Tracing provides visibility into this interaction, empowering teams to quickly diagnose and resolve problems.\n\n![tracing example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098009/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750098009139.png)\n\nFor example, this trace illustrates how a user request flows through different services to fetch product recommendations on an e-commerce website:\n\n- `User Action`: This indicates the user's initial action, such as clicking a button to request product recommendations on a product page.\n-  `Web front-end`: The web front-end sends a request to the recommendation service to retrieve product recommendations.\n- `Recommendation service`: The request from the web front-end is handled by the recommendation service, which processes the request to generate a list of recommended products.\n- `Catalog service`: The recommendation service calls the catalog service to fetch details of the recommended products. An alert icon suggests an issue or delay at this stage, such as a slow response or error in fetching product details.\n- `Database`: The catalog service queries the database to retrieve the actual product details. 
This span shows the SQL query in the database.\n\nBy visualizing this end-to-end trace, developers can identify performance issues – here, an error in the Catalog service – and quickly diagnose and resolve issues across the distributed system.\n\n![End-to-end trace](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098009/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098009140.png)\n\n## How Distributed Tracing works\n\nHere is a breakdown of how Distributed Tracing works.\n\n### Collect data from any application with OpenTelemetry\n\nTraces and spans can be collected using [OpenTelemetry](https://opentelemetry.io/docs/what-is-opentelemetry/), an open-source observability framework that supports a wide array of SDKs and libraries across [major programming languages and frameworks](https://opentelemetry.io/docs/languages/). This framework offers a vendor-neutral approach for collecting and exporting telemetry data, enabling developers to avoid vendor lock-in and choose the tools that best fit their needs.\n\nThis means that if you are already using OpenTelemetry with another vendor, you can send data to us simply by adding our endpoint to your configuration file, making it very easy to try out our features!\n\n![Distributed tracing workflow diagram](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098009/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750098009141.png)\n\n### Ingest and retain data at scale with fast, real-time queries\n\nObservability requires the storage and querying of vast amounts of data while maintaining low latency for real-time analytics. To meet these needs, we developed a horizontally scalable, long-term storage solution using ClickHouse and Kubernetes, based on our [acquisition of Opstrace](https://about.gitlab.com/press/releases/2021-12-14-gitlab-acquires-opstrace-to-expand-its-devops-platform-with-open-source-observability-solution/). 
This [open-source platform](https://gitlab.com/gitlab-org/opstrace/opstrace) ensures rapid query performance and enterprise-grade scalability, all while minimizing costs.\n\n### Explore and analyze traces effortlessly\nAn advanced, native-level user interface is crucial for effective data exploration. We built such an interface from the ground up, starting with our Trace Explorer, which allows users to examine traces and understand their application's performance:\n- __Advanced filtering:__ Filter by services, operation names, status, and time range. Autocomplete helps simplify querying.\n- __Error highlighting:__ Easily identify error spans in search results.\n- __RED metrics:__ Visualize the Requests rate, Errors rate, and average Duration as a time-series chart for any search in real-time.\n- __Timeline view:__ Individual traces are displayed as a waterfall diagram, providing a complete view of a request distributed across different services and operations.\n- __Historical data:__ Users can query traces up to 30 days in the past.\n\n![Distributed Tracing - image 5](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098009/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750098009141.png)\n\n## How we use Distributed Tracing at GitLab\n[Dogfooding](https://handbook.gitlab.com/handbook/values/#dogfooding) is a core value and practice at GitLab. We've been already using early versions of Distributed Tracing for our engineering and operations needs. Here are a couple example use cases from our teams:\n\n### 1. Debug errors and performance Issues in GitLab Agent for Kubernetes\n\nThe [Environments group](https://handbook.gitlab.com/handbook/engineering/development/ops/deploy/environments/) has been using Distributed Tracing to troubleshoot and resolve issues with the [GitLab Agent for Kubernetes](https://gitlab.com/gitlab-org/cluster-integration/gitlab-agent), such as timeouts or high latency issues. 
The Trace List and Trace Timeline views offer valuable insights for the team to address these concerns efficiently. These traces are shared and discussed in the [related GitLab issues](https://gitlab.com/gitlab-org/cluster-integration/gitlab-agent/-/issues/386#note_1576431796), where the team collaborates on resolution.\n\n\u003Ccenter>\u003Ci>\"The Distributed Tracing feature has been invaluable in pinpointing where latency issues are occurring, allowing us to focus on the root cause and resolve it faster.\" - Mikhail, GitLab Engineer\u003C/i>\u003C/center>\u003Cp>\n\n### 2. Optimize GitLab’s build pipeline duration by identifying performance bottlenecks\n\nSlow deployments of GitLab source code can significantly impact the productivity of the whole company, as well as our compute spending. Our main repository runs [over 100,000 pipelines every month](https://gitlab.com/gitlab-org/gitlab/-/pipelines/charts). If the time it takes for these pipelines to run changes by just one minute, it can add or remove more than 2,000 hours of work time. That's 87 extra days!\n\nTo optimize pipeline execution time, GitLab's [platform engineering teams](https://handbook.gitlab.com/handbook/engineering/infrastructure/) utilize a [custom-built tool](https://gitlab.com/gitlab-com/gl-infra/gitlab-pipeline-trace) that converts GitLab deployment pipelines into traces.\n\nThe Trace Timeline view allows them to visualize the detailed execution timeline of complex pipelines and pinpoint which jobs are part of the critical path and slowing down the entire process. 
By identifying these bottlenecks, they can optimize job execution – for example, making the job fail faster, or running more jobs in parallel – to improve overall pipeline efficiency.\n\n![Distributed Tracing - image 6](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098009/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750098009143.gif)\n\n[The script is freely available](https://gitlab.com/gitlab-com/gl-infra/gitlab-pipeline-trace), so you can adapt it for your own pipelines.\n\n\u003Ccenter>\u003Ci>\"Using Distributed Tracing for our deployment pipelines has been a game-changer. It's helped us quickly identify and eliminate bottlenecks, significantly reducing our deployment times.\"- Reuben, GitLab Engineer\u003C/i>\u003C/center>\u003Cp>\n\n## What's coming next?\n\nThis release is just the start: In the next few months, we'll continue to expand our observability and monitoring features with the upcoming Metrics and Logging releases. Check out [our Observability direction page](https://about.gitlab.com/direction/monitor/platform-insights/) for more info, and keep an eye out for updates!\n\n## Join the private Beta\n\nInterested in being part of this exciting journey? [Sign up to enroll in the private Beta](https://docs.gitlab.com/operations/observability/) and try out our features. Your contribution can help shape the future of observability within GitLab, ensuring our tools are perfectly aligned with your needs and challenges.\n\n> Help shape the future of GitLab Observability. 
[Join the Distributed Tracing Beta.](https://docs.gitlab.com/operations/observability/)",[2729,695,1133,9,1319],{"slug":3179,"featured":91,"template":698},"monitor-application-performance-with-distributed-tracing","content:en-us:blog:monitor-application-performance-with-distributed-tracing.yml","Monitor Application Performance With Distributed Tracing","en-us/blog/monitor-application-performance-with-distributed-tracing.yml","en-us/blog/monitor-application-performance-with-distributed-tracing",{"_path":3185,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3186,"content":3192,"config":3199,"_id":3201,"_type":14,"title":3202,"_source":16,"_file":3203,"_stem":3204,"_extension":19},"/en-us/blog/more-granular-product-usage-insights-for-gitlab-self-managed-and-dedicated",{"title":3187,"description":3188,"ogTitle":3187,"ogDescription":3188,"noIndex":6,"ogImage":3189,"ogUrl":3190,"ogSiteName":685,"ogType":686,"canonicalUrls":3190,"schema":3191},"More granular product usage insights for GitLab Self-Managed and Dedicated","Learn how event-level data helps GitLab improve the DevSecOps platform. 
Opt-out option is always available.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099221/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%2811%29_78Dav6FR9EGjhebHWuBVan_1750099221690.png","https://about.gitlab.com/blog/more-granular-product-usage-insights-for-gitlab-self-managed-and-dedicated","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"More granular product usage insights for GitLab Self-Managed and Dedicated\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Tanuja Jayarama Raju\"}],\n        \"datePublished\": \"2025-03-26\"\n      }",{"title":3187,"description":3188,"authors":3193,"heroImage":3189,"date":3195,"body":3196,"category":717,"tags":3197,"updatedDate":3198},[3194],"Tanuja Jayarama Raju","2025-03-26","In GitLab 18.0, we plan to enable event-level product usage data collection from GitLab Self-Managed and GitLab Dedicated instances – while ensuring privacy, transparency, and customer control every step of the way.\n\nWe know data powers valuable insights to help you understand the performance of your DevSecOps practices. Similarly, platform usage data enables us to prioritize the investments and product improvements that drive more impact for you.\t\n\nHistorically, we’ve collected both event and aggregate product usage data from GitLab.com. However, for GitLab Self-Managed and Dedicated instances, the absence of event data has required the GitLab Customer Success team to rely on manual data extraction methods to gather key insights, including job runtimes, runner usage for cost optimization, pipeline success rates, and deployment frequency for assessing DevSecOps maturity. Access to event-level data reduces the need for workarounds and enables more efficient reporting and optimizations. 
\n\n**Note: Throughout this blog, when we discuss event collection, we are exclusively referring to the collection of events for all features except those included in GitLab Duo. For more details, please refer to our [Customer Product Usage Information page](https://handbook.gitlab.com/handbook/legal/privacy/customer-product-usage-information/).**\n\n## Understanding event-level data\n\nEvent-level data tracks product usage interactions within the GitLab platform, such as initiating CI/CD pipelines, merging a merge request, triggering a webhook, or creating a new issue. User identifiers are pseudonymized to protect privacy, and GitLab does not undertake any processes to re-identify or associate the metrics with individual users. Importantly, event-level data does not include source code or other customer-created content stored within GitLab. To learn more, visit our [Customer Product Usage Information page](https://handbook.gitlab.com/handbook/legal/privacy/customer-product-usage-information/) and [event data documentation](https://docs.gitlab.com/administration/settings/event_data/).\n\nHere is an example of a data sample we collect:\n\n![event-level data - code example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099231/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750099230972.png)\n\n## How event-level data collection benefits you\n\nEvent-level data offers a wealth of insights beyond what aggregated data can provide. It enables slicing and aggregating pseudonymized system instrumentation to identify trends, highlight unused or underused areas, and signal product improvements. By analyzing usage patterns in context, we can understand which features are used, how, and in what sequence. 
This visibility uncovers bottlenecks and optimization opportunities that aggregated data would miss.\n\n* **In-depth feature usage analysis**  \n  Rather than just knowing which features are used weekly or monthly, event-level data provides a clearer picture of how users experience GitLab and the frequency of their usage. This enables us to gain a deeper understanding of user behavior and highlights areas for improvement.  \n* **Trend discovery**  \n  Event-level data helps identify trends in GitLab adoption that can’t be seen with rolling aggregates. With these insights, the GitLab Customer Success team can help customers make more informed decisions on feature adoption and usage, improving overall efficiency.  \n* **Smarter product improvements**  \n  Event-level data gives GitLab’s Product team a clearer picture of real-world customer needs. By analyzing usage patterns, product improvements can be aligned with customer priorities, leading to continuous enhancements that make GitLab more powerful, efficient, and user-friendly.  \n* **Custom insights for your use case**  \n  Event-level data will enable GitLab Customer Success to provide tailored insights based on your organization's overall product usage without identifying individual users. This flexibility helps our teams provide recommendations that address your unique needs and challenges.\n\n## You stay in control of your data\n\n![event-level data - screen of choices](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099231/Blog/Content%20Images/Blog/Content%20Images/Screenshot_2025-04-02_at_12.14.12_PM_aHR0cHM6_1750099230972.png)\n\nWe’re committed to rolling this out with a strong focus on privacy. Here’s what we’re doing to ensure transparency and choice:\n\n✅ **Pre-deployment early opt-out** – Data sharing can be disabled by instance admins in the 17.11 release before event collection begins in 18.0. 
The pre-deployment early opt-out option will remain available after 18.0; just upgrade to 17.11 first and disable data sharing.\n\n✅ **Proactive communication** – Updates on the progress of this initiative shared via blog posts, emails to GitLab admins, and updates through your GitLab account team.\n\n ✅ **No third-party collectors** - GitLab’s event-level instrumentation will not use any third-party collectors; it’s built and operated by GitLab, and events are sent directly to GitLab-managed environments, similar to [Service Ping](https://handbook.gitlab.com/handbook/legal/privacy/customer-product-usage-information/#service-ping-formerly-known-as-usage-ping).\n\n✅ **Detailed documentation** – Detailed documentation is available [here](https://docs.gitlab.com/administration/settings/event_data/), and a list of FAQs is available [here](http://handbook.gitlab.com/handbook/legal/privacy/product-usage-events-faq/).\n\n✅ **De-identification approach** – We will continue to apply aggregation and/or pseudonymization to any event-level data collected from Self-Managed and Dedicated.\n\n## What’s next\n\n* **Product enhancements (coming up!)** - Improvements to GitLab user experiences and adoption insights made possible by event-level data.\n\n*Disclaimer: This blog contains information related to upcoming products, features, and functionality. It is important to note that the information in this blog is for informational purposes only. Please do not rely on this information for purchasing or planning purposes. As with all projects, the items mentioned in this blog and linked pages are subject to change or delay. 
The development, release, and timing of any products, features, or functionality remain at the sole discretion of GitLab Inc.*",[717,9,695],"2025-05-14",{"slug":3200,"featured":91,"template":698},"more-granular-product-usage-insights-for-gitlab-self-managed-and-dedicated","content:en-us:blog:more-granular-product-usage-insights-for-gitlab-self-managed-and-dedicated.yml","More Granular Product Usage Insights For Gitlab Self Managed And Dedicated","en-us/blog/more-granular-product-usage-insights-for-gitlab-self-managed-and-dedicated.yml","en-us/blog/more-granular-product-usage-insights-for-gitlab-self-managed-and-dedicated",{"_path":3206,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3207,"content":3213,"config":3218,"_id":3220,"_type":14,"title":3221,"_source":16,"_file":3222,"_stem":3223,"_extension":19},"/en-us/blog/new-scheduled-reports-generation-tool-simplifies-value-stream-management",{"title":3208,"description":3209,"ogTitle":3208,"ogDescription":3209,"noIndex":6,"ogImage":3210,"ogUrl":3211,"ogSiteName":685,"ogType":686,"canonicalUrls":3211,"schema":3212},"New Scheduled Reports Generation tool simplifies value stream management","Proactively receive the most recent metrics from the GitLab Value Streams Dashboard, streamlining the reporting process. 
This walkthrough shows you how.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749669134/Blog/Hero%20Images/blog-image-template-1800x945__17_.png","https://about.gitlab.com/blog/new-scheduled-reports-generation-tool-simplifies-value-stream-management","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"New Scheduled Reports Generation tool simplifies value stream management\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Haim Snir\"}],\n        \"datePublished\": \"2024-06-20\",\n      }",{"title":3208,"description":3209,"authors":3214,"heroImage":3210,"date":3215,"body":3216,"category":1277,"tags":3217},[2746],"2024-06-20","Optimizing processes and performance is crucial for staying competitive in the fast-paced world of software development. [GitLab Value Stream Management (VSM)](https://www.youtube.com/watch?v=8pLEucNUlWI) is a powerful solution that helps organizations achieve this by providing a holistic view of the entire software delivery lifecycle. VSM enables teams to measure, manage, and improve their workflows, ensuring that every step adds value and minimizes waste. GitLab VSM also includes [AI Impact Analytics](https://about.gitlab.com/blog/developing-gitlab-duo-ai-impact-analytics-dashboard-measures-the-roi-of-ai/), which helps managers quantify the impact of [GitLab Duo](https://about.gitlab.com/gitlab-duo/), our AI-powered suite of features to power DevSecOps workflows, on productivity, providing deeper insights into how AI enhances developer efficiency. Now, we are announcing the next step in this VSM journey: Scheduled Reports Generation, available now.\n\nWith the Scheduled Reports Generation tool, value stream management becomes easier and more effective. 
Scheduled Reports Generation is designed to streamline the reporting process, providing you with the most recent [metrics from the Value Streams Dashboard](https://docs.gitlab.com/ee/user/analytics/value_streams_dashboard.html#dashboard-metrics-and-drill-down-reports), delivered on a scheduled basis.\n\nThe Value Streams Dashboard tracks key metrics throughout the software development lifecycle, assesses the impact of process improvements, and drills down into roadblocks. It helps to compare best practices across teams in turn improving workflow and delivering customer value faster.\n\n> Learn more with our [Value Streams Dashboard tutorial](https://about.gitlab.com/blog/getting-started-with-value-streams-dashboard/).\n\n## Why scheduled VSM Reports are important\n\nScheduled Reports Generation provides software managers a powerful partner in their quest for continuous improvement. This tool offers the ability to automate the creation and distribution of detailed value stream reports across the software delivery lifecycle. Here’s why this is valuable:\n\n1. **Consistent monitoring:** Having automated reports ensures that software managers receive regular updates on critical metrics without manual intervention. This consistency helps in maintaining a continuous feedback loop.\n\n2. **Data-driven decision-making:** With up-to-date and accurate data at their fingertips, managers can make better and faster decisions, driving better results.\n\n3. **Time savings:** Automating report generation frees up valuable time for managers, allowing them to focus on strategic initiatives rather than routine data collection and analysis.\n\n### Inside the Scheduled Reports Generation tool\n\nHere is how the VSM tool works:\n\n1. The VSM reporting tool is a [CI/CD Catalog component](https://about.gitlab.com/blog/ci-cd-catalog-goes-ga-no-more-building-pipelines-from-scratch/) that allows you to periodically schedule reports.\n\n2. 
These reports collect metrics from projects or groups via the public GitLab GraphQL API and are built using [GitLab Flavored Markdown](https://docs.gitlab.com/ee/user/markdown.html).\n\n3. As the final step, the tool opens an issue in the designated project, complete with a markdown comparison metrics table, as shown in the example below.\n\n![Scheduled reports generation - issue generation](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749677009/Blog/Content%20Images/Screenshot_2024-06-20_at_18.38.05.png)\n\n> To learn more and for additional examples, please visit the [Scheduled Reports Generation's README file](https://gitlab.com/components/vsd-reports-generator#example-for-monthly-executive-value-streams-report).\n\n### Get to know the Value Streams Dashboard\nWatch this intro video to get familiar with Value Streams Dashboard.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/8pLEucNUlWI?si=aIdrvREPVBwfC4wM\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## Additional resources\n- [Getting started with the new GitLab Value Streams Dashboard](https://about.gitlab.com/blog/getting-started-with-value-streams-dashboard/)\n- [Developing GitLab Duo: AI Impact analytics dashboard measures the ROI of AI](https://about.gitlab.com/blog/developing-gitlab-duo-ai-impact-analytics-dashboard-measures-the-roi-of-ai/)",[695,9,1133],{"slug":3219,"featured":6,"template":698},"new-scheduled-reports-generation-tool-simplifies-value-stream-management","content:en-us:blog:new-scheduled-reports-generation-tool-simplifies-value-stream-management.yml","New Scheduled Reports Generation Tool Simplifies Value Stream 
Management","en-us/blog/new-scheduled-reports-generation-tool-simplifies-value-stream-management.yml","en-us/blog/new-scheduled-reports-generation-tool-simplifies-value-stream-management",{"_path":3225,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3226,"content":3232,"config":3238,"_id":3240,"_type":14,"title":3241,"_source":16,"_file":3242,"_stem":3243,"_extension":19},"/en-us/blog/next-generation-gitlab-container-registry-goes-ga",{"title":3227,"description":3228,"ogTitle":3227,"ogDescription":3228,"noIndex":6,"ogImage":3229,"ogUrl":3230,"ogSiteName":685,"ogType":686,"canonicalUrls":3230,"schema":3231},"Next-generation GitLab container registry goes GA","Starting in GitLab 17.3, GitLab self-managed instances can access the generally available container registry, which features efficient zero-downtime garbage collection and other benefits.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662332/Blog/Hero%20Images/blog-image-template-1800x945__23_.png","https://about.gitlab.com/blog/next-generation-gitlab-container-registry-goes-ga","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Next-generation GitLab container registry goes GA\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Tim Rizzi\"}],\n        \"datePublished\": \"2024-07-23\",\n      }",{"title":3227,"description":3228,"authors":3233,"heroImage":3229,"date":3234,"body":3235,"category":717,"tags":3236},[925],"2024-07-23","Last year, we embarked on an ambitious journey to [re-architect the GitLab container registry](https://gitlab.com/gitlab-org/container-registry/-/issues/199) and unlock powerful new capabilities like zero-downtime garbage collection. 
After successfully migrating GitLab.com to this next-generation registry, we [opened up a beta program](https://about.gitlab.com/blog/gitlabs-next-generation-container-registry-is-now-available/) for self-managed customers to test out the new architecture and provide feedback.\n\nThe results from the beta program have been outstanding – participants are already realizing major benefits, including the following:\n\n- significant storage cost and maintenance time savings from efficient zero-downtime garbage collection, with no required downtime or manual interventions\n- improved performance and reliability for tag cleanup policies and the container registry API and UI\n- early access to new features like better sorting/filtering and storage usage visibility\n\nBased on the positive feedback and successful migrations during the beta, we are excited to announce that the next-generation GitLab container registry will become generally available – but off by default – for self-managed deployments starting with GitLab 17.3.\n\nBelow are the goals and non-goals for reaching this point. The goals are what we need to have in place to officially call this feature GA. 
The non-goals clarify what will not be present or required at the start of GA support for bringing your own database; however, these features may be added later.\n\n__Goals__\n- The import process is free of known bugs.\n- Import documentation reflects known best practices and addresses feedback from the [beta program](https://gitlab.com/gitlab-org/gitlab/-/issues/423459).\n- Registry API, metadata database, and zero-downtime garbage collection are stable and reliable.\n- Able to automatically apply database schema migrations for Charts installs during upgrades.\n- Provide registry database as an opt-in improvement.\n\n__Non-goals__\n- Automatically provision registry database.\n- Automatically apply database schema migrations for omnibus installs during upgrades.\n- Automatically import object storage data.\n- Provide Geo support to ensure your registry is highly available.\n\nFor existing self-managed instances, here's what you can expect:\n\n- In GitLab 17.3, the new registry will be included, but disabled by default to allow time for planning migrations.\n- Enabling the database will be an opt-in process outlined in the [documentation](https://docs.gitlab.com/ee/administration/packages/container_registry_metadata_database.html).\n- The legacy container registry will still receive security updates, but new features and improvements will only be developed for the next-gen version.\n- We will target GitLab 19.0 for the legacy registry to stop being supported after over a year of co-existence.\n- Our goal is to make this transition as seamless as possible while putting customers in control of their migration timeline. The [documentation](https://docs.gitlab.com/ee/administration/packages/container_registry_metadata_database.html) covers all the details on how to plan and execute the move to the next-gen registry.\n\nThis architectural investment lays the foundation for an even more powerful container registry experience in the years ahead. 
Some of the significant improvements on our roadmap include:\n\n- protected repositories and immutable tags\n- improved Helm chart management\n- improved support for signing and attestations\n- many more UX/UI enhancements are only possible with the database architecture\n\nWe couldn't have reached this GA milestone without the valuable feedback from our beta participants. As always, please continue to share your experiences so we can make the GitLab container registry an indispensable part of your DevSecOps toolchain.\n\n> You can try the container registry today with a [free trial of GitLab Ultimate](https://gitlab.com/-/trial_registrations/new?glm_source=about.gitlab.com/blog&glm_content=default-saas-trial).",[9,695,3237,717],"cloud native",{"slug":3239,"featured":91,"template":698},"next-generation-gitlab-container-registry-goes-ga","content:en-us:blog:next-generation-gitlab-container-registry-goes-ga.yml","Next Generation Gitlab Container Registry Goes Ga","en-us/blog/next-generation-gitlab-container-registry-goes-ga.yml","en-us/blog/next-generation-gitlab-container-registry-goes-ga",{"_path":3245,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3246,"content":3252,"config":3257,"_id":3259,"_type":14,"title":3260,"_source":16,"_file":3261,"_stem":3262,"_extension":19},"/en-us/blog/online-retailer-bol-tackles-growing-compliance-needs-with-gitlab",{"title":3247,"description":3248,"ogTitle":3247,"ogDescription":3248,"noIndex":6,"ogImage":3249,"ogUrl":3250,"ogSiteName":685,"ogType":686,"canonicalUrls":3250,"schema":3251},"Online retailer bol tackles growing compliance needs with GitLab","Learn how GitLab helps the major international company adhere to regulations while increasing development efficiency.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749665465/Blog/Hero%20Images/blog-image-template-1800x945__15_.png","https://about.gitlab.com/blog/online-retailer-bol-tackles-growing-compliance-needs-with-gitlab","\n                        {\n      
  \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Online retailer bol tackles growing compliance needs with GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Julie Griffin\"}],\n        \"datePublished\": \"2024-06-12\",\n      }",{"title":3247,"description":3248,"authors":3253,"heroImage":3249,"date":3254,"body":3255,"category":1073,"tags":3256},[690],"2024-06-12","[Bol](https://www.bol.com/nl/nl/), which uses GitLab Ultimate, is one of the largest online retailers in the Netherlands and Belgium. The company offers a product range of 38 million items alongside 50,000 sales partners who sell their goods on its marketplace. Bol relies on innovative technology to increase development efficiency, adhere to compliance regulations, and maintain trust across its extensive customer base.\n\nBol equips its teams with the GitLab DevSecOps platform, enabling its developers to quickly and securely ship projects, while saving the team thousands of manual hours on compliance checks.\n\n“GitLab is helping us stay flexible and competitive as we grow, and as the requirements that our software and our developers need to comply with grow,” says Guus Houtzager, engineering manager on bol’s Continuous Integration and Continuous Deployment team. “That's the biggest challenge that we had and we tackled it with GitLab.”\n\nHowever, as bol's revenue grew, so did the compliance rules and regulations it had to adhere to. 
The company needs to continually adapt its software to meet strict, and often updated regulations, such as the General Data Protection Regulation (GDPR), International Organization for Standardization (ISO) requirements, and the EU Artificial Intelligence Act.\n\nAfter adopting GitLab Community in 2016 and GitLab Premium several years later, bol upgraded to GitLab Ultimate in 2024 to [meet the growing compliance load](https://about.gitlab.com/solutions/security-compliance/) and help its teams tackle projects faster and more efficiently.\n\n![Guus Houtzager of bol - quote box](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749675638/Blog/Content%20Images/bol_Blog_-_Guus.png)\n\n## Saving thousands of developer hours per month \n\nGitLab enables bol’s DevSecOps teams to set up policies that automate compliance configurations and checks. This helps them achieve consistency and scalability in their compliance efforts, and reduce the risk of human error. With compliance guardrails in place, its team of 850 developers can focus more of their energy on creating innovative, secure software.\n\n“We bought GitLab Ultimate so we can have compulsory compliance pipelines that ensures our teams are working within compliance regulations from the start,” says Houtzager.\n\nBy allowing developers to focus on coding without the burden of compliance regulations, the bol development team dramatically increased its efficiency.\n\n“This has saved our developers several thousands of hours in total per month,” says Houtzager.\n\nIn addition to time savings, the team is now confident it can handle any compliance roadblocks that come its way.\n\n“We know that GitLab is going to help us with compliance and software security,” says Houtzager. “Even if we get new regulations, we have a toolkit, through GitLab, that enables us to follow and comply with any new regulations. 
We don't know exactly what will happen, but we know we are in a position to handle whatever comes our way.”\n\n## Shifting left to protect customers and its business\n\nAs a large player in the European retail world, trust is a key pillar of bol’s business model. The company handles a large quantity of personal data, such as addresses and order details. While regulatory fines are a concern, so is maintaining trust with its customer base. That only emphasizes the importance of security.\n\n“Most of the people in the Netherlands and Belgium have bought something from us in the past and people trust us,” says Houtzager. “They trust that we handle their payment details properly. We don't sell your Personal Identifiable Information PII data, and they trust us to keep it safe and secure.”\n\nTo protect customer data and its business, bol shifted security left, enabling developers to find errors and vulnerabilities earlier in the development process. However, shifting left without the right tools in place could lead to developers spending countless hours trying to correct any problems they find.\n\n“If you shift left without also providing teams the tools, support, and processes to make sure that they can do this work in an efficient manner, teams get bogged down in either procedures or manual work,” says Houtzager.\n\nWith GitLab Ultimate, bol is able to set up the layout and permission model to meet the company’s security requirements, giving developers the freedom to quickly build and ship projects while protecting customer and business data. The DevSecOps platform has the added benefit of tracking the changes and fixes that developers make and noting them in compliance records. \n\n## Looking ahead to AI\n\nMoving forward, bol plans to use more GitLab Ultimate features, like cloud integration, and artificial intelligence (AI) capabilities, along with even more security features. 
\n\nFrom building secure software faster to improving the developer experience, bol looks forward to one day using AI-powered [GitLab Duo](https://about.gitlab.com/gitlab-duo/) to help them scale their software development. \n\n“The situation must be right for us to be able to use it and then we will definitely take a look at how it can help us,” says Houtzager. “We, like everybody else, are looking at where AI can help us to improve situations across the entire software development life cycle. So if someone is building code, how can it help them? If someone is working on other aspects of the process, how can it help them?” \n\n> Read more customer stories on [the GitLab customers page](https://about.gitlab.com/customers/).\n",[109,693,1075,9],{"slug":3258,"featured":6,"template":698},"online-retailer-bol-tackles-growing-compliance-needs-with-gitlab","content:en-us:blog:online-retailer-bol-tackles-growing-compliance-needs-with-gitlab.yml","Online Retailer Bol Tackles Growing Compliance Needs With Gitlab","en-us/blog/online-retailer-bol-tackles-growing-compliance-needs-with-gitlab.yml","en-us/blog/online-retailer-bol-tackles-growing-compliance-needs-with-gitlab",{"_path":3264,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3265,"content":3271,"config":3276,"_id":3278,"_type":14,"title":3279,"_source":16,"_file":3280,"_stem":3281,"_extension":19},"/en-us/blog/origin-of-devsecops-platform-category",{"title":3266,"description":3267,"ogTitle":3266,"ogDescription":3267,"noIndex":6,"ogImage":3268,"ogUrl":3269,"ogSiteName":685,"ogType":686,"canonicalUrls":3269,"schema":3270},"Disagree, commit, and disagree: How a lazy solution became a category","Find out the origin story of the DevSecOps category.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749679881/Blog/Hero%20Images/flowercomingthroughsidewalkcrack.png","https://about.gitlab.com/blog/origin-of-devsecops-platform-category","\n                        {\n        \"@context\": 
\"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Disagree, commit, and disagree: How a lazy solution became a category\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Sid Sijbrandij\"}],\n        \"datePublished\": \"2023-08-30\",\n      }",{"title":3266,"description":3267,"authors":3272,"heroImage":3268,"date":1215,"body":3274,"category":1133,"tags":3275},[3273],"Sid Sijbrandij","\nA few months ago, GitLab - and the DevOps Platform category - reached a big milestone. Two influential analyst firms, [Gartner](https://about.gitlab.com/blog/gitlab-leader-gartner-magic-quadrant-devops-platforms/) and [Forrester](https://about.gitlab.com/blog/gitlab-leader-forrester-wave-integrated-software-delivery-platforms/), issued reports that validate the market is moving from point solutions to a platform. They officially recognized DevOps platforms as a category. A category we created. \n\nThis is the story of how we did it.\n\nI am thrilled that we created a category. Very few companies are able to do so. Other examples are Dropbox for the file hosting service category; Hubspot for inbound marketing; and Slack for searchable logs of all communications and knowledge. The backstory is that we did not start with a vision for creating a category. GitLab didn’t even begin as a business. It started with a programmer’s need for a great open source collaboration tool. \n\nNow, nearly 12 years after GitLab’s [very first commit](https://gitlab.com/gitlab-org/gitlab-foss/-/commit/9ba1224867665844b117fa037e1465bb706b3685), I want to share what we learned on the journey to creating the DevOps Platform category.\n\n## Category design begins with solving your own problem\nDmitriy Zaporozhets needed a tool to collaborate with his team. His employer at the time wasn’t willing to buy the tool he wanted, so he decided to build it himself. 
He created GitLab in 2011 from his home in Ukraine.\n\nTogether with Valeriy Sizov, Dmitriy started to build GitLab as a developer collaboration tool based on Git. Developers from around the world quickly began using it. In the first year, 300 people contributed to improving it.\n\nGitLab was not founded with a grand plan or a 10-year vision to create a single platform for the entire software development lifecycle. The reality is that GitLab began with one person who had a need and built a solution to meet it.\n\n## Categories are discovered, not planned\nOne of the things I respect most about Dmitriy is that he built GitLab as open source, allowing others to use his ideas and build on them in their own ways. He was so committed to open source that he was supportive of me commercializing his work.\n\nI encountered GitLab for the first time in 2012. I recognized the value that it could provide for other software companies, but I also saw the challenges in installing and managing it. Not everyone had the means to do that. I saw the potential for GitLab to be commercialized as a SaaS business: cloud-based source code management (SCM) for everyone.\n\nI was nervous about commercializing Dmitriy’s work, so I reached out to tell him what I was working on. He was happy that what I was doing could help GitLab become more popular and attract even more community contributions, which it did.\n\nThis was our exchange: \n\n\n![Emails between Sid and Dmitriy](https://about.gitlab.com/images/blogimages/devsecopsoriginmessages.png){: .shadow}\n\n\nIn late 2012, similar to how Dmitriy made an SCM tool for his own need, he built his own continuous integration (CI) tool called GitLab CI, a tool that ran tests to check the code for conflicts.\n\nMeanwhile, large organizations began adopting GitLab, and Dmitriy tweeted that he wanted to work on GitLab full-time. I got in touch with him to work out an arrangement for him to join GitLab, the company. 
But when I went to the local Western Union branch to make a wire transfer, I had to convince the teller that I knew Dmitriy and was not falling victim to wire transfer fraud - a common issue at the time.\n\nWe then introduced [GitLab Enterprise Edition](https://about.gitlab.com/releases/2013/07/22/announcing-gitlab-enterprise-edition/) with features asked for by larger organizations. \n\nThen, in 2015, we noticed that a community contributor named Kamil Trzciński built a far better runner than we did (ours was in Ruby and single-threaded, his was in Go and multi-threaded). It was so much better that we decided to adopt his runner as the standard.\n\nThrough iteration, building on each other’s ideas, and being open to ideas from outside our company, we continued to build two great tools for SCM and CI. \n\nHowever, I admit that there were critical moments when our willingness to allow others to contribute would be tested. When Kamil joined GitLab full-time we could not have predicted that he would help us discover a new category. Not by contributing a better CI runner but by changing the way software is developed. \n\nKamil suggested a radical idea: to integrate GitLab SCM and GitLab CI into one tool.\n\n## Disagree, commit, and disagree\nDmitriy and I disagreed with Kamil. Dmitriy believed in the Unix philosophy where one program should do one thing well; if you want a program to do something else, start a new one. I thought that customers wanted separate tools for separate use cases. The market was filled with specialized point solutions.\n\nMany business leaders say, “Disagree and commit,” and we did. We disagreed, and committed to continuing to build two different products.\n\nBut Kamil persisted in making a strong case for [why SCM and CI should be integrated](https://about.gitlab.com/blog/gitlab-hero-devops-platform/). 
This is when our operating principle of [disagree, commit, and disagree](https://handbook.gitlab.com/handbook/values/#disagree-commit-and-disagree) was born. Every decision can be changed, and the best decisions should often be made despite management’s opinion.\n\nDmitry and I relented and took Kamil’s suggestion over our opinion and the opinion of the market. \n\nIt was a lazy choice because combining SCM and CI would mean having only one Ruby on Rails app to maintain. We could avoid duplicating the interface and the data, making it more efficient to develop code. But it also ended up being a far better user experience, giving customers a much faster way to set up CI, and faster cycle times by not having to switch between apps. GitLab became a platform with one UI, one data store, one way to serve up information, and one way for a company to collaborate and be on the same page at the same time.\n\nBy taking the suggestion of someone new to the team and creating [the world’s first DevOps platform](https://about.gitlab.com/blog/how-ten-steps-over-ten-years-led-to-the-devops-platform/), we changed the course of our company and, eventually, the whole software development industry. I am proud to be a part of the DevSecOps Platform story because it is a story about allowing everyone to contribute, especially when someone else has the best idea. \n\nIt is important to disagree and commit but still disagree. That is how Dmitriy and I realized that there could be one platform for the entire software development lifecycle, and eight years later, Forrester, Gartner, and the market see it, too.\n\nToday, we have a [DevSecOps platform](https://about.gitlab.com/platform/?stage=plan). \n\nLooking to the future, we hope to create another category: [AllOps](https://about.gitlab.com/company/vision/), a single application, for all R&D that includes DevSecOps, ModelOps DataOps, and Service Desk. 
\n\nIn the future, we will expand support for [ModelOps and DataOps](https://about.gitlab.com/direction/modelops/) to give customers the ability to manage data and its associated AI/ML models in a similar fashion to their software projects. \n\nAnd, because customers need the ability to triage application incidents directly where their applications are built and deployed, we will continue to expand our [Service Desk](https://docs.gitlab.com/ee/user/project/service_desk/index.html) offering.\n\nIt is GitLab’s mission to ensure that everyone can contribute. Our vision for AllOps moves us further in that direction - to deliver a single application for all innovation.\n",[1133,1423,9,951],{"slug":3277,"featured":6,"template":698},"origin-of-devsecops-platform-category","content:en-us:blog:origin-of-devsecops-platform-category.yml","Origin Of Devsecops Platform Category","en-us/blog/origin-of-devsecops-platform-category.yml","en-us/blog/origin-of-devsecops-platform-category",{"_path":3283,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3284,"content":3290,"config":3297,"_id":3299,"_type":14,"title":3300,"_source":16,"_file":3301,"_stem":3302,"_extension":19},"/en-us/blog/pair-gitlab-and-the-good-docs-project-template-to-improve-release-notes",{"title":3285,"description":3286,"ogTitle":3285,"ogDescription":3286,"noIndex":6,"ogImage":3287,"ogUrl":3288,"ogSiteName":685,"ogType":686,"canonicalUrls":3288,"schema":3289},"Pair GitLab and The Good Docs Project template to improve release notes","Creating compelling, detailed, human-readable notes for software releases is important. 
Using GitLab and this template from The Good Docs Project makes it easier.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099541/Blog/Hero%20Images/Blog/Hero%20Images/AdobeStock_432673748_5xWPNsktdz2QChWhl16jGq_1750099540656.jpg","https://about.gitlab.com/blog/pair-gitlab-and-the-good-docs-project-template-to-improve-release-notes","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Pair GitLab and The Good Docs Project template to improve release notes\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Aaron Peters, Member, Good Docs Project\"}],\n        \"datePublished\": \"2024-01-23\",\n      }",{"title":3285,"description":3286,"authors":3291,"heroImage":3287,"date":3293,"body":3294,"category":3295,"tags":3296},[3292],"Aaron Peters, Member, Good Docs Project","2024-01-23","Release notes allow software users to quickly understand the changes that come with the latest version of software. They also allow software publishers to highlight changes as important, or provide crucial information about the impact an upgrade may have. Some tools allow developers to \"generate\" release notes based on sources of data (such as completed items in DevOps systems), but notes produced this way tend to simply list changes without context. Writing release notes, however, provides teams with the opportunity to \"tell the story\" of the changes the new software version will bring.\n\nThough this process certainly requires a greater investment of time than publishing a basic changelog does, your users will certainly appreciate the results: release notes that explain the key elements of the release (such as new features, improvements, and known issues) in a well-organized, human-readable way.\n\n[The Good Docs Project's](https://thegooddocsproject.dev/welcome/) release notes template is designed to help you do exactly that. 
And the combination of GitLab's work management platform and our own [Release Notes template](https://gitlab.com/tgdp/templates/-/tree/main/release-notes?ref_type=heads) makes the job of putting out good, informative release notes easier.\n\n## The anatomy of quality release notes\n\nRelease notes that provide readers with a good picture of the version's changes require two primary inputs:\n\n- **A list of the changes included in the release**\n  At The Good Docs Project, all the management of the work of our contributors occurs in GitLab. So it's easy to refer to our release plans to identify which additions and improvements were completed and included in the release.\n- **A description of those changes including reasoning, importance, and impact**\n  This is where our project's Release Notes template can assist. Rather than staring at a blank page, wondering where to start, users can begin to fill in our template step-by-step, adjusting to taste.\n\nWe'll walk through each of these steps in the following sections as they occurred when creating the release notes to [our recent Dragon release](https://gitlab.com/tgdp/templates/-/releases/v1.1.0).\n\n## Gathering a release's changes\n\nAt The Good Docs Project, we use GitLab features — including setting milestones, creating/assigning issues, and tagging releases — to get our work out into the community (our prior blog post here at GitLab describes this process). The platform allows our worldwide contributor base to easily discover new things to work on and update everyone on their progress once they select something. 
When the time comes to package a release, it brings the added benefit of a tidy list of issues included in the project at the time of release.\n\n\u003Cimg src=\"https://res.cloudinary.com/about-gitlab-com/image/upload/v1752176084/Blog/hxo08y06apkumwuwm80c.png\" alt=\"The Milestone screen in GitLab provides an easy-to-scan list of work included in the release\" width=\"100%\" height=\"auto\">\n\nWhen creating the release notes for our project's Dragon milestone, we reviewed all the items included in the **Closed** column on the Milestone screen. This allowed us to pick the most important changes to highlight, while leaving out issues that wouldn't significantly impact a user's experience.\n\n## Crafting the release notes\n\nEquipped with a list of all the key updates in the release, we start writing the release notes. Our project's [Release Notes template](https://gitlab.com/tgdp/templates/-/blob/main/release-notes/template-release-notes.md?ref_type=heads) provides a ready-made Markdown skeleton comprised of key sections based on our contributors' research and experience. The accompanying [usage guide](https://gitlab.com/tgdp/templates/-/blob/main/release-notes/guide-release-notes.md?ref_type=heads) and [example of the template in action](https://gitlab.com/tgdp/templates/-/blob/main/release-notes/example-release-notes.md?ref_type=heads) provides additional tips and suggestions for writing effective release notes. The latter references our **Chronologue** project, a fictional telescope and application that can see through time, which is naturally well-documented.\n\n\u003Cimg src=\"https://res.cloudinary.com/about-gitlab-com/image/upload/v1752176091/Blog/jcpfxjqb5jpidssm3jlr.png\" alt=\"The Release Notes template comes ready to populate with 'the story' of your latest release\" width=\"100%\" height=\"auto\">\n\nOf course, our template is simply a starting point. 
Teams should always feel free to add sections where they make sense, remove them where they don't, and make the style of it their own. For example, we left out the **Bug fixes** and **Known issues** sections in our latest Dragon release notes, instead focusing on the new additions and improvements this release brought.\n\n## Adding release notes to the release\n\nGitLab's build tools also make it easy to add our notes while actually creating the release. First, we tagged one of our project's commits, then created a release from the tag. On GitLab's **Releases > New** screen, we can copy and paste the Markdown we wrote to automatically format the release notes.\n\n\u003Cimg src=\"https://res.cloudinary.com/about-gitlab-com/image/upload/v1752176096/Blog/otwduhvokgnqclon4ugx.png\" alt=\"Our templates are already in Markdown format, so when it's time to paste them into the release it works automagically!\" width=\"100%\" height=\"auto\">\n\nAnd just like that our release notes are done. With the assistance of the template, they required just an hour to write. And after an additional half-hour of work creating the release, we're ready to send our work out to the community. Our experience using the combination of GitLab and our templates has made the process of shipping our templates a piece of cake.\n\nIf you'd like to check out our templates, feel free to browse [our GitLab project](https://gitlab.com/tgdp).\nOr visit our [community page](https://thegooddocsproject.dev/community/) to learn how to join us in leveling up the state of technical documentation.\n\n*The [GitLab Open Source Partners](https://go.gitlab.com/030Ue3) are building the future of open source on GitLab. 
[Connect with them](https://gitlab.com/gitlab-com/marketing/developer-relations/open-source-program/gitlab-open-source-partners) on Gitlab.com.*\n","open-source",[2728,9,269,719],{"slug":3298,"featured":91,"template":698},"pair-gitlab-and-the-good-docs-project-template-to-improve-release-notes","content:en-us:blog:pair-gitlab-and-the-good-docs-project-template-to-improve-release-notes.yml","Pair Gitlab And The Good Docs Project Template To Improve Release Notes","en-us/blog/pair-gitlab-and-the-good-docs-project-template-to-improve-release-notes.yml","en-us/blog/pair-gitlab-and-the-good-docs-project-template-to-improve-release-notes",{"_path":3304,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3305,"content":3310,"config":3314,"_id":3316,"_type":14,"title":3317,"_source":16,"_file":3318,"_stem":3319,"_extension":19},"/en-us/blog/prepare-now-docker-hub-rate-limits-will-impact-gitlab-ci-cd",{"title":3306,"description":3307,"ogTitle":3306,"ogDescription":3307,"noIndex":6,"ogImage":2920,"ogUrl":3308,"ogSiteName":685,"ogType":686,"canonicalUrls":3308,"schema":3309},"Prepare now: Docker Hub rate limits will impact GitLab CI/CD","Learn how Docker Hub's upcoming pull rate limits will affect GitLab pipelines and what you can do to avoid disruptions.","https://about.gitlab.com/blog/prepare-now-docker-hub-rate-limits-will-impact-gitlab-ci-cd","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Prepare now: Docker Hub rate limits will impact GitLab CI/CD\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Tim Rizzi\"}],\n        \"datePublished\": \"2025-03-24\",\n      }",{"title":3306,"description":3307,"authors":3311,"heroImage":2920,"date":2454,"body":3312,"category":779,"tags":3313},[925],"On April 1, 2025, Docker will implement new [pull rate\nlimits](https://docs.docker.com/docker-hub/usage/) to Docker Hub that may\nsignificantly impact CI/CD pipelines across the industry, 
including those\nrunning on GitLab. The most significant change is the 100 pulls-per-6-hours\nlimit for unauthenticated users.\n\n\n## What's changing?\n\n\nStarting April 1, Docker will enforce the following pull rate limits:\n\n\n| User type | Pull rate limit per hour | Number of public repositories | Number of private repositories |\n|-----------|----------------|--------------|------------------|\n| Business, Team, Pro (authenticated) | Unlimited (fair use) | Unlimited | Unlimited |\n| Personal (authenticated) | 200 per 6-hour window | Unlimited | Up to 1 |\n| Unauthenticated users | 100 per 6-hour window per IPv4 address or IPv6 /64 subnet | Not applicable | Not applicable |\n\n\n\u003Cp>\u003C/p>\n\nThis is particularly important because:\n\n\n* GitLab's Dependency Proxy currently pulls from Docker Hub as an\nunauthenticated user.\n\n* Most CI/CD pipelines that don't use the Dependency Proxy pull directly\nfrom Docker Hub as unauthenticated users.\n\n* On hosted runners for GitLab.com, multiple users might share the same IP\naddress or subnet, making them collectively subject to this limit.\n\n\n## How this impacts GitLab users\n\n\n**Impact on direct Docker Hub pulls**\n\n\nIf your CI/CD pipelines directly pull images from Docker Hub without\nauthentication, they will be limited to 100 pulls per six-hour window per IP\naddress. For pipelines that run frequently or across multiple projects\nsharing the same runner infrastructure, this will quickly exhaust the limit\nand cause pipeline failures.\n\n\n**Impact on GitLab Dependency Proxy**\n\n\nThe GitLab Dependency Proxy feature allows you to cache Docker images within\nGitLab to speed up pipelines and reduce external dependencies. 
However, the\ncurrent implementation pulls from Docker Hub as an unauthenticated user,\nmeaning it will also be subject to the 100 pulls-per-6-hours limit.\n\n\n**Impact on hosted runners**\n\n\nFor hosted runners on GitLab.com, we use [Google Cloud's pull-through\ncache](https://cloud.google.com/artifact-registry/docs/pull-cached-dockerhub-images).\nThis mirrors the commonly pulled images and allows us to avoid rate limits.\nJob images defined as `image:` or `services:` in your `.gitlab-ci.yml` file,\nare not affected by rate limits.\n\n\nThings are slightly more challenging whenever images are pulled within the\nrunner environment. The most common use case to pull images during runner\nruntime is to build an image using Docker-in-Docker or Kaniko. In this\nscenario, the Docker Hub image defined in your `Dockerfile` is pulled\ndirectly from Docker Hub and is likely to be affected by rate limits.\n\n\n## How GitLab is responding\n\n\nWe're actively working on solutions to mitigate these challenges:\n\n\n* **Dependency Proxy authentication:** We've added support for Docker Hub\nauthentication in the [GitLab Dependency Proxy\nfeature](https://gitlab.com/gitlab-org/gitlab/-/issues/331741). 
This will\nallow the Dependency Proxy to pull images from Docker Hub as an\nauthenticated user, significantly increasing the rate limits.\n\n* **Documentation updates:** We've updated our\n[documentation](https://docs.gitlab.com/user/packages/dependency_proxy/#configure-credentials)\nto provide clear guidance on configuring pipeline authentication for Docker\nHub.\n\n* **Internal infrastructure preparation:** We're preparing our internal\ninfrastructure to minimize the impact on hosted runners for GitLab.com.\n\n\n## How you can prepare\n\n\n**Option 1: Configure Docker Hub authentication in your pipelines**\n\n\nFor pipelines that pull directly from Docker Hub, you can configure\nauthentication to increase your rate limit to 200 pulls per six-hour window\n(or unlimited with a paid Docker Hub subscription).\n\n\nAdd Docker Hub credentials to your project or group CI/CD variables (not in\nyour `.gitlab-ci.yml` file). Please refer to our [documentation on using\nDocker\nimages](https://docs.gitlab.com/ci/docker/using_docker_images/#use-statically-defined-credentials)\nfor detailed instructions on setting up the `DOCKER_AUTH_CONFIG` CI/CD\nvariable correctly.\n\n\n**Option 2: Use the GitLab Container Registry**\n\n\nConsider pushing your frequently used Docker images to your [GitLab\nContainer\nRegistry](https://docs.gitlab.com/user/packages/container_registry/). This\neliminates the need to pull from Docker Hub during CI/CD runs:\n\n\n1. Pull the image from Docker Hub.\n\n2. Tag it for your GitLab Container Registry.\n\n3. Push it to your GitLab Container Registry.\n\n4. 
Update your pipelines to pull from GitLab Container Registry.\n\n\n```\n\ndocker pull busybox:latest\n\ndocker tag busybox:latest $CI_REGISTRY_IMAGE/busybox:latest\n\ndocker push $CI_REGISTRY_IMAGE/busybox:latest\n\n```\n\n\nThen in your `.gitlab-ci.yml`:\n\n\n`image: $CI_REGISTRY_IMAGE/busybox:latest`\n\n\n**Option 3: Use GitLab Dependency Proxy**\n\n\nGitLab's Dependency Proxy feature provides a way to cache and proxy Docker\nimages, reducing external dependencies and rate limit issues.\n\n\nCurrent authentication options:\n\n* GitLab 17.10: Configure Docker Hub authentication for the Dependency Proxy\nusing [GraphQL\nAPI](https://docs.gitlab.com/user/packages/dependency_proxy/#configure-credentials-using-the-graphql-api)\n\n* GitLab 17.11: Use the new UI-based configuration in your group's settings\n(already available on GitLab.com)\n\n\nOnce authentication is properly configured, you can:\n\n\n1. Configure Docker Hub credentials in your group's Dependency Proxy\nsettings:\n  - For GitLab 17.11+ (or current GitLab.com): Navigate to your group's settings > Packages & Registries > Dependency Proxy.\n  - For GitLab 17.10: Use the GraphQL API to configure authentication.\n2. 
Update your pipelines to use the Dependency Proxy URLs in your CI/CD\nconfiguration:\n\n`image: ${CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX}/busybox:latest`\n\n\n**Option 4: Consider a Docker Hub paid subscription**\n\n\nFor organizations with heavy Docker Hub usage, upgrading to a paid Docker\nsubscription (Team or Business) will provide unlimited pulls, which may be\nthe most straightforward solution.\n\n\n## Best practices to reduce Docker Hub rate limit impact\n\n\nRegardless of which option you choose, consider these best practices to\nminimize Docker Hub rate limit impact:\n\n\n* Use specific image tags instead of `latest` to avoid unnecessary pulls.\n\n* Consolidate your Docker files to use the same base images across projects.\n\n* Schedule less critical pipelines to run outside of peak hours.\n\n* Use caching effectively to avoid pulling the same images repeatedly.\n\n\n**Note:** According to Docker Hub\n[documentation](https://docs.docker.com/docker-hub/usage/pulls/#pull-definition),\nthe pull count is incremented when pulling the image manifest, not based on\nimage size or number of layers.\n\n\n## Timeline and next steps\n\n\n**Now**\n  * Implement authentication for direct Docker Hub pulls.\n  * GitLab.com users can already configure Docker Hub authentication for the Dependency Proxy using either:\n    * The GraphQL API, or\n    * The UI in group settings\n  * Self-managed GitLab 17.10 users can configure Dependency Proxy authentication using the GraphQL API.\n\n**April 1, 2025**\n  * Docker Hub rate limits go into effect.\n\n**April 17, 2025**\n  * GitLab 17.11 will be released with UI-based Dependency Proxy authentication support for self-managed instances. \n\nWe recommend taking action well before the April 1 deadline to avoid\nunexpected pipeline failures. For most users, configuring the Dependency\nProxy with Docker Hub authentication is the most efficient long-term\nsolution.\n\n\n> Have questions or need implementation help? 
Please visit [this\nissue](https://gitlab.com/gitlab-org/gitlab/-/issues/526605) where our team\nis actively providing support.\n",[109,1133,9],{"slug":3315,"featured":91,"template":698},"prepare-now-docker-hub-rate-limits-will-impact-gitlab-ci-cd","content:en-us:blog:prepare-now-docker-hub-rate-limits-will-impact-gitlab-ci-cd.yml","Prepare Now Docker Hub Rate Limits Will Impact Gitlab Ci Cd","en-us/blog/prepare-now-docker-hub-rate-limits-will-impact-gitlab-ci-cd.yml","en-us/blog/prepare-now-docker-hub-rate-limits-will-impact-gitlab-ci-cd",{"_path":3321,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3322,"content":3328,"config":3336,"_id":3338,"_type":14,"title":3339,"_source":16,"_file":3340,"_stem":3341,"_extension":19},"/en-us/blog/prevent-secret-leaks-in-source-code-with-gitlab-secret-push-protection",{"title":3323,"description":3324,"ogTitle":3323,"ogDescription":3324,"noIndex":6,"ogImage":3325,"ogUrl":3326,"ogSiteName":685,"ogType":686,"canonicalUrls":3326,"schema":3327},"Prevent secret leaks in source code with GitLab Secret Push Protection","Learn how Secret Push Protection, now generally available, adds to a defense-in-depth detection strategy and decreases the resources needed to remediate secret leaks.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097761/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%286%29_6vL96ttKF8zJLLqfPpvFs_1750097761137.png","https://about.gitlab.com/blog/prevent-secret-leaks-in-source-code-with-gitlab-secret-push-protection","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Prevent secret leaks in source code with GitLab Secret Push Protection\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Amar Patel\"},{\"@type\":\"Person\",\"name\":\"Sara Meadzinger\"}],\n        \"datePublished\": \"2024-06-24\",\n      
}",{"title":3323,"description":3324,"authors":3329,"heroImage":3325,"date":3332,"body":3333,"category":693,"tags":3334,"updatedDate":3335},[3330,3331],"Amar Patel","Sara Meadzinger","2024-06-24","Secret Push Protection is now generally available for all GitLab Ultimate and GitLab Dedicated customers. [Secret Push Protection](https://docs.gitlab.com/ee/user/application_security/secret_detection/secret_push_protection/) blocks secrets such as keys and API tokens from being pushed to GitLab. The content of each commit is checked for [high-confidence secrets](https://docs.gitlab.com/ee/user/application_security/secret_detection/detected_secrets.html) when pushed to GitLab. If any high-confidence secrets are detected, the push is blocked. By protecting secrets from leaking in the first place, your team can greatly reduce risk and reduce time spent on rotating secrets.\n\n## The risk of leaked secrets\n\nSecrets, such as tokens and API keys, are frequently used by applications to authenticate and provide access to sensitive data. Developers sometimes inadvertently hardcode these secrets, and then push that code into source management systems, like GitLab. Hardcoded secrets stored in plain text are a low-effort, high-value target for malicious actors, as numerous recent high-profile breaches have demonstrated. Secrets do not require any special skills to exploit and many secrets do not automatically expire. Therefore, once a malicious actor has access to a secret, they can continue using it indefinitely to cause data breaches, service disruptions, IP theft, source code theft, and software supply chain compromises. Both [Verizon’s annual Data Breach Investigations Report](https://www.verizon.com/business/resources/reports/dbir) and [IBM’s annual Cost of a Data Breach report](https://www.ibm.com/reports/data-breach) have repeatedly reported that compromised credentials, which include secrets, are one of the most frequent and expensive source of breaches. 
\n\nIBM’s research also indicates that taking a DevSecOps, or shift-left, approach is the most effective way to reduce the average cost of a data breach. Until now, GitLab’s primary secret detection method has been [Pipeline Secret Detection](https://docs.gitlab.com/ee/user/application_security/secret_detection/pipeline/), which scans committed files after they have been pushed to GitLab and identifies secrets that are already leaked. Once a secret has leaked, it should be considered compromised and must be rotated according to the steps outlined by the secret issuer. Remediating detected secrets requires security teams and developers to work closely together to follow the steps outlined by a secret issuer to rotate the leaked secret. It can be a tedious, confusing, and risky process. Utilizing GitLab’s Secret Push Protection feature, you can shift secret detection further left, protect your secrets from leaking in the first place, and reduce the amount of time and energy required to remediate leaks.\n\n## How Secret Push Protection works\nOnce [Secret Push Protection](https://docs.gitlab.com/ee/user/application_security/secret_detection/secret_push_protection/) is enabled on a project, developers are blocked from pushing code to projects that contain any high-confidence secrets. This ensures a performant experience when pushing your code and also results in a lower number of false alerts. **Note:** Here is the [list of high-confidence patterns Secret Push Protection supports](https://docs.gitlab.com/ee/user/application_security/secret_detection/secret_push_protection/detected_secrets.html). \n\nWhile we are checking the contents of each commit, we've [excluded](https://docs.gitlab.com/ee/user/application_security/secret_detection/secret_push_protection/#coverage) a number of factors in order to optimize the performance of this workflow. 
Because of this, we recommend using Secret Push Protection in a layered approach alongside [Pipeline Secret Detection](https://docs.gitlab.com/ee/user/application_security/secret_detection/pipeline). Using both features in tandem maximizes coverage to identify more leaked secrets across the software development lifecycle.\n\n# Get started with Secret Push Protection\n\nWe've put a video playlist together to help you get started on using this feature:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/videoseries?si=kRG65YbljQ-Nu2wa&amp;list=PL05JrBw4t0KoADm-g2vxfyR0m6QLphTv-\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n### Enable Secret Push Protection\n\nOn GitLab Dedicated and Self-managed, you must allow the use of Secret Push Protection in your instance and then enable it per project. On GitLab.com, you only need to enable it per project.\n\nYou must have at least the Maintainer role to enable push protection for the project.\n\n1. On the left sidebar, select **Search** or **Go to** and find your project.\n1. On the left sidebar, select **Secure > Security configuration**.\n1. Turn on the Secret Push Protection toggle.\n\n![secret push protection - toggle](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097769/Blog/Content%20Images/Blog/Content%20Images/Screenshot_2024-10-18_at_12.24.47_PM_aHR0cHM6_1750097769198.png)\n\n## Skip push protection\n\nIn some instances, when a push is blocked, you might find it necessary to skip Secret Push Protection. For example, a developer may need to commit a placeholder secret for testing. You can skip Secret Push Protection via a Git option or commit message, meeting developers in whichever Git client they are using. 
\n\n## Add exclusions\n\nWe released exclusions, giving you flexibility to exclude certain paths, rules from the default ruleset, or raw values from being scanned, detected, and blocked by push protection. From the Security Configuration page, Maintainers and project Owners can manage push protection exclusion lists within the UI on a per-project basis. \n\n![secret push protection - image 4](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097769/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097769199.png)\n\n## Audit events\n\nDisabling Secret Push Protection, or even skipping it altogether, can prove to be costly if not done for the appropriate reasons. We've introduced [audit events](https://docs.gitlab.com/ee/user/compliance/audit_events.html) to help administrators and security teams understand where and how this feature is being used, and to assist in any secrets-related investigations.\n\nWe currently log audit events when Secret Push Protection is: \n\n- enabled/disabled at an instance level\n- enabled/disabled at project level\n- skipped via a push option\n- skipped via a commit message \n\nAnd when an exclusion is:\n- created\n- updated\n- deleted \n\n![secret push protection - image 2](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097769/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097769200.png)\n\nThese audit events can be used in conjunction with [audit event streaming](https://docs.gitlab.com/ee/administration/audit_event_streaming/) to manage audit logs in third-party systems (like SIEMs), enabling customers to capture trends such as: how many times push protection is being skipped; which projects frequently bypass push protection; and which secrets are commonly skipped and may need to be excluded moving forward. \n\n# Dogfooding Secret Push Protection\n\nWe [dogfood everything](https://about.gitlab.com/handbook/engineering/development/principles/#dogfooding) here at GitLab. 
We've [collaborated](https://gitlab.com/groups/gitlab-org/-/epics/13523) with various teams across the organization to enable this feature across key projects, including our primary GitLab codebase. This process has enabled us to identify and address improvements early in the development process, and it has increased our confidence in the stability, performance, and customer workflows for the release of this feature.\n\n# What's next\n\nYou can help us improve this feature by commenting on [this Secret Push Protection feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/467408). We’ll incorporate your feedback and make [additional improvements](https://gitlab.com/groups/gitlab-org/-/epics/13107) as we continue to add new capabilities to the feature.\n\n> Learn more about the [Secret Push Protection](https://docs.gitlab.com/ee/user/application_security/secret_detection/secret_push_protection/).\n\n# Read more\n\n- [How Secret Detection can proactively revoke leaked credentials](https://about.gitlab.com/blog/how-secret-detection-can-proactively-revoke-leaked-credentials) \n- [How to implement secret management best practices with GitLab](https://about.gitlab.com/the-source/security/how-to-implement-secret-management-best-practices-with-gitlab/)\n- [GitLab native secrets manager to give software supply chain security a boost](https://about.gitlab.com/blog/gitlab-native-secrets-manager-to-give-software-supply-chain-security-a-boost)",[693,719,695,9,717],"2024-10-17",{"slug":3337,"featured":6,"template":698},"prevent-secret-leaks-in-source-code-with-gitlab-secret-push-protection","content:en-us:blog:prevent-secret-leaks-in-source-code-with-gitlab-secret-push-protection.yml","Prevent Secret Leaks In Source Code With Gitlab Secret Push 
Protection","en-us/blog/prevent-secret-leaks-in-source-code-with-gitlab-secret-push-protection.yml","en-us/blog/prevent-secret-leaks-in-source-code-with-gitlab-secret-push-protection",{"_path":3343,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3344,"content":3350,"config":3356,"_id":3358,"_type":14,"title":3359,"_source":16,"_file":3360,"_stem":3361,"_extension":19},"/en-us/blog/provision-group-runners-with-google-cloud-platform-and-gitlab-ci",{"title":3345,"description":3346,"ogTitle":3345,"ogDescription":3346,"noIndex":6,"ogImage":3347,"ogUrl":3348,"ogSiteName":685,"ogType":686,"canonicalUrls":3348,"schema":3349},"Provision group runners with Google Cloud Platform and GitLab CI","This tutorial will teach you how to set up a new group runner on GitLab.com using Google Cloud Platform in less than 10 minutes.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098300/Blog/Hero%20Images/Blog/Hero%20Images/AdobeStock_623844718_4E5Fx1Q0DHikigzCsQWhOG_1750098300048.jpg","https://about.gitlab.com/blog/provision-group-runners-with-google-cloud-platform-and-gitlab-ci","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Provision group runners with Google Cloud Platform and GitLab CI\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Sarah Matthies\"},{\"@type\":\"Person\",\"name\":\"Noah Ing\"}],\n        \"datePublished\": \"2024-11-19\",\n      }",{"title":3345,"description":3346,"authors":3351,"heroImage":3347,"date":3353,"body":3354,"category":928,"tags":3355},[3352,1357],"Sarah Matthies","2024-11-19","Are you interested in hosting your own servers to run your GitLab CI/CD\npipelines but don’t know where to begin? Setting up a GitLab Runner to run\nyour pipelines on your own infrastructure can seem like a daunting task as\nit requires infrastructure knowledge and the know-how to maintain that\ninfrastructure. 
Typically this process requires the provision of\ninfrastructure, the installing of dependency, and testing that it works with\nyour GitLab instance.\n\n\nThis article highlights how easy it is to easily spin up a GitLab Runner of\nyour own utilizing GitLab’s Google Cloud Integration. Follow this tutorial\nand it will teach you how to set up a new group runner on GitLab.com using\nGoogle Cloud Platform in less than 10 minutes!\n\n\nYou will learn how to:\n\n\n- Create a new group runner.\n\n- Configure the new group runner’s tags and description.\n\n- Register the new group runner by adding in configurations.\n\n- Provision the GitLab Runner utilizing `gcloud cli` and Terraform.\n\n- Have your GitLab Runner pick up its first GitLab CI job.\n\n\n## Prerequisites\n\n- A terminal with Bash installed\n\n- Owner access on a Google Cloud Platform project\n\n- Terraform (or OpenTofu) [Version\n1.5](https://releases.hashicorp.com/terraform/1.5.7/) or greater \n\n- [gcloud CLI](https://cloud.google.com/sdk/docs/install) \n\n- 10 minutes\n\n\n## Tutorial\n\n1. Create a new group runner under __Build > Runners > New Group Runner__.\n\n\n__Note:__ Navigate to the group level.\n\n\n![GitLab Runner setup\nscreen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098317/Blog/Content%20Images/Blog/Content%20Images/image7_aHR0cHM6_1750098317126.png)\n\n\n2. Configure the new group runner's tags, description, and any additional\nconfigurations.\n\n\n![New Group Runner\nsetup](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098317/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750098317127.png)\n\n\n3. Select __Google Cloud__.\n\n\n![Select Google Cloud\nscreen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098317/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750098317129.png)\n\n\n4. 
Copy your project ID from Google Cloud Platform.\n\n\n![Copy project ID from GCP\nscreen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098317/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098317131.png)\n\n\n5. Fill out your Google Cloud project ID and choose a region, zone, and type\nof machine you want to use.\n\n\n![Screen to fill out Google Cloud\ninformation](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098317/Blog/Content%20Images/Blog/Content%20Images/image8_aHR0cHM6_1750098317132.png)\n\n\n6\\. Once this information is filled out, click **Setup instructions**.\n\n\nRun the bash script provided in Step 1 above.\n\n\n**Note:** This script was saved to a file called `setup.sh` for ease of use.\nYou may copy this right into your terminal if you are confident in\ndebugging.\n\n\n![Setup instructions\nscreen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098317/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750098317134.png)\n\n\n![Script for GitLab\nRunner](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098317/Blog/Content%20Images/Blog/Content%20Images/image10_aHR0cHM6_1750098317135.png)\n\n\n7\\. Create a `main.tf` file and follow the instructions in GitLab.\n\n\n**Note:** If you want to use OpenTofu instead of Terraform, you can still\ncopy the code and only have to adjust the Terraform commands for applying\nthe configuration. \n\n\n![Install and register GitLab Runner\nscreen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098317/Blog/Content%20Images/Blog/Content%20Images/image9_aHR0cHM6_1750098317136.png)\n\n\nOnce successfully provisioned, you should be see the following:\n\n\n![GitLab Runner\ncode](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098317/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750098317137.png)\n\n\n8\\. 
If you close the instructions and click the **View runners** button, you\nwill now have a newly provisioned runner present with \"Never contacted\" as\nits status.\n\n\n![Newly provisioned runner on\nscreen](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098317/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750098317139.png)\n\n\n9\\. In any project, add the following `.gitlab-ci.yml`.\n\n\n```  \n\nstages:  \n  - greet\n\nhello_job:  \n  stage: greet  \n  tags:  \n    - gcp-runner  \n  script:  \n    - echo \"hello\"  \n```\n\n\nVolia! You have set up your first GitLab Runner utilizing Google Cloud\nPlatform.\n\n\n# Next steps\n\n\nNow that you have provisioned your very own GitLab Runner, consider\noptimizing it for your specific use case. Some things to consider with your\nrunner moving forward:\n\n\n- Is the runner I provisioned the right size? Does it need additional\nresources for my use case? \n\n- Does the GitLab Runner contain all the dependency my builds need?  
\n\n- How can I store the GitLab Runner as infrastructure as code?\n\n\n> Make sure to bookmark the [Provisioning runners in Google Cloud\ndocumentation](https://docs.gitlab.com/ee/ci/runners/provision_runners_google_cloud.html)\nfor easy reference.\n",[719,9,1542,109,930,1968,233],{"slug":3357,"featured":6,"template":698},"provision-group-runners-with-google-cloud-platform-and-gitlab-ci","content:en-us:blog:provision-group-runners-with-google-cloud-platform-and-gitlab-ci.yml","Provision Group Runners With Google Cloud Platform And Gitlab Ci","en-us/blog/provision-group-runners-with-google-cloud-platform-and-gitlab-ci.yml","en-us/blog/provision-group-runners-with-google-cloud-platform-and-gitlab-ci",{"_path":3363,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3364,"content":3370,"config":3375,"_id":3377,"_type":14,"title":3378,"_source":16,"_file":3379,"_stem":3380,"_extension":19},"/en-us/blog/quick-vulnerability-remediation-with-gitlab-advanced-sast-duo-ai",{"title":3365,"description":3366,"ogTitle":3365,"ogDescription":3366,"noIndex":6,"ogImage":3367,"ogUrl":3368,"ogSiteName":685,"ogType":686,"canonicalUrls":3368,"schema":3369},"Quick vulnerability remediation with GitLab Advanced SAST + Duo AI ","Shorten your mean time to remediation by pairing Advanced SAST and artificial intelligence. 
This detailed demo shows you how.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098458/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945_24mPf16vAPHORs3d9y62q_1750098458538.png","https://about.gitlab.com/blog/quick-vulnerability-remediation-with-gitlab-advanced-sast-duo-ai","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Quick vulnerability remediation with GitLab Advanced SAST + Duo AI \",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Fernando Diaz\"}],\n        \"datePublished\": \"2024-10-22\",\n      }",{"title":3365,"description":3366,"authors":3371,"heroImage":3367,"date":3372,"body":3373,"category":801,"tags":3374},[1599],"2024-10-22","With GitLab 17.4, we’ve made [GitLab Advanced SAST generally\navailable](https://about.gitlab.com/blog/gitlab-advanced-sast-is-now-generally-available/).\n[GitLab Advanced\nSAST](https://docs.gitlab.com/ee/user/application_security/sast/gitlab_advanced_sast.html)\nis a static application security testing scanner designed to discover\nvulnerabilities by performing cross-function and cross-file taint analysis.\nBy following the paths user inputs take, the analyzer identifies potential\npoints where untrusted data can influence the execution of your application\nin unsafe ways, ensuring the vulnerabilities are detected even when they\nspan multiple functions and files.\n\n\nGitLab Advanced SAST can be used together with [GitLab Duo Vulnerability\nExplanation](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/#explaining-a-vulnerability)\nin order to reduce the mean time to remediation (MTTR). 
GitLab Duo can\nprovide practical, AI-powered examples of how threat actors can exploit\nvulnerabilities and offer light-weight remediation guidance, which can be\nused with cross-file analysis to enhance application security (AppSec)\nefficiency.\n\n\nThis tutorial will show you how to:\n\n* enable GitLab Advanced SAST\n\n* read results from the scanner\n\n* review the code flow of a vulnerability\n\n* use GitLab AI to quickly remediate the vulnerability\n\n\n## Enable GitLab Advanced SAST\n\n\nFollow the instructions below to enable GitLab Advanced SAST. You can also\nview this video to get started:\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/xDa1MHOcyn8?si=5SYuKgP-BdBryqcU\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n## Run GitLab Advanced SAST on each code commit\n\n\nBefore using Advanced SAST, the following prerequisites must be met:\n\n\n- GitLab Ultimate Subscription ([free\ntrial](https://gitlab.com/-/trials/new?glm_content=default-saas-trial&glm_source=about.gitlab.com%2F))\n\n- GitLab SaaS or GitLab Self-managed (running Version 17.4)\n\n\nTo enable the GitLab Advanced SAST scanner:\n\n\n- On the left sidebar, select **Search** or **Go to** and find your project.\n\n- Add or edit the `.gitlab-ci.yml` to include the following:\n    - Test stage\n    - `Jobs/SAST.gitlab-ci.yml` template\n    - `GITLAB_ADVANCED_SAST_ENABLED` variable set to true\n- Apply the change.\n\n\nYour newly merged `.gitlab-ci.yml` should contain the following:\n\n\n```yaml\n\nstages:\n  - test\n\ninclude:\n  - template: Jobs/SAST.gitlab-ci.yml\n\nvariables:\n  GITLAB_ADVANCED_SAST_ENABLED: 'true'\n```\n\n\nThis will now run the `gitlab-advances-sast` job within the test stage of\nyour application along with all the other jobs you have defined. 
Advanced\nSAST will replace the semgrep SAST scanner for the [supported programming\nlanguages](https://docs.gitlab.com/ee/user/application_security/sast/gitlab_advanced_sast.html#supported-languages).\n\n\n![Running `gitlab-advances-sast` job within the test stage of your\napplication](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098466/Blog/Content%20Images/Blog/Content%20Images/1_aHR0cHM6_1750098466629.png)\n\n\n\u003Ccenter>\u003Ci>GitLab Advanced SAST job in pipeline\u003C/i>\u003C/center>\n\n\n\u003Cbr>\u003C/br>\n\n\n**Note:** You can fully configure the job as you would any job in GitLab.\nFor more information, see the [CI/CD YAML syntax\ndocumentation](https://docs.gitlab.com/ee/ci/yaml/).\n\n\n## Remediate vulnerabilities in merge request (pre-production)\n\n\nJust like our previous SAST scanner, Advanced SAST allows you to scan source\ncode in the diff of a feature branch. This allows us to address any incoming\nvulnerabilities before they make it into production. 
Here we can see the\nscanner results for the diff within a merge request:\n\n\n![Advanced SAST scanner results for the diff within a merge\nrequest](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098467/Blog/Content%20Images/Blog/Content%20Images/2_aHR0cHM6_1750098466630.png)\n\n\nWhen selecting a newly detected vulnerability, we get the following details\nto assist with remediation:\n\n\n- **Status:** The status of the vulnerability (Needs triage, Confirmed,\nDismissed, Resolved)\n\n- **Description:** Detailed information on the detected vulnerability\n\n- **Detection time:** Time vulnerability was detected\n\n- **Location:** Line of code where vulnerability is detected\n\n- **Severity:** Severity of vulnerability from CVE database\n\n- **Training:** Gamified training from our partners\n\n- **Solutions:** Information on how to remediate or resolve a vulnerability\n\n- **Identifiers:** Relevant links showcasing detailed description,\nexploitation, and remediation\n\n\n![Merge request with vulnerability\ninsights](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098467/Blog/Content%20Images/Blog/Content%20Images/MR_with_vulnerability_insights_aHR0cHM6_1750098466632.png)\n\n\n\u003Ccenter>\u003Ci>Merge request with vulnerability insights\u003C/i>\u003C/center>\n\n\n\u003Cbr>\u003C/br> \n\nVulnerabilities detected within an MR are actionable, meaning they can be\ndismissed or an issue can be created and populated with relevant\nvulnerability information.\n\n\nDismissing an issue saves AppSec teams time, because they can see relevant\ndeveloper information when reviewing an MR. Creating a confidential issue\nallows developers and AppSec teams to further collaborate on resolving a\nvulnerability where a fix is not straightforward. 
Confidential issues have\nlimited permissions and can be used with confidential merge requests to\nprevent possible malicious actors from exploiting.\n\n\nTo further support separation of duties and prevent vulnerable code from\nmaking it into production, you can require approval from certain people (for\nexample, the security team) in order to merge vulnerable code.\n\n\n![GitLab security policies in\naction](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098467/Blog/Content%20Images/Blog/Content%20Images/security_policies_in_action_aHR0cHM6_1750098466634.png)\n\n\n\u003Ccenter>\u003Ci>Security policies in action\u003C/i>\u003C/center>\n\n\n\u003Cbr>\u003C/br>\n\n\n**Note:** Learn more about Security Policies and how to implement them in\nthe [Security Policy\ndocumentation](https://docs.gitlab.com/ee/user/application_security/policies/).\n\n\n## Manage vulnerabilities in production\n\n\nWhile preventing vulnerabilities from making it into production is crucial\nfor application security, it is equally as important to manage\nvulnerabilities in production. 
When security scanners are run on a default\nor production-level branch, a [vulnerability\nreport](https://docs.gitlab.com/ee/user/application_security/vulnerability_report/)\nwill be populated with the latest vulnerability data which can be used to\ntriage and manage vulnerabilities.\n\n\n![GitLab Vulnerability Report sorted by Advanced\nSAST](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098467/Blog/Content%20Images/Blog/Content%20Images/5_aHR0cHM6_1750098466636.png)\n\n\n\u003Ccenter>\u003Ci>GitLab Vulnerability Report sorted by Advanced SAST\u003C/i>\u003C/center>\n\n\u003Cbr>\u003C/br>\n\n\nWhen selecting a vulnerability you get similar vulnerability details as seen\nin a merge request, making for a single source of truth for developers and\nAppSec teams.\n\n\n![Vulnerability page with vulnerability\ninsights](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098467/Blog/Content%20Images/Blog/Content%20Images/Vuln_page_with_vulnerability_insights_aHR0cHM6_1750098466637.png)\n\n\n\u003Ccenter>\u003Ci>Vulnerability page with vulnerability insights\u003C/i>\u003C/center>\n\n\n\u003Cbr>\u003C/br>\n\n\nAppSec teams can triage a vulnerability by changing its status and adding\nrelevant details on the status change. Issues can be created to track the\nprogress of a fix. 
From here, a developer can be assigned.\n\n\n## Examine vulnerable code flow\n\n\nFor vulnerabilities detected with Advanced SAST, we can see a \"Code flow\"\ntab on the Vulnerability page.\n\n\n![Advanced SAST - image\n7](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098467/Blog/Content%20Images/Blog/Content%20Images/7_aHR0cHM6_1750098466638.png)\n\n\n\u003Ccenter>\u003Ci>GitLab Advanced SAST code flow\u003C/i>\u003C/center>\n\n\u003Cbr>\u003C/br>\n\n\nIn this example, you can see that a vulnerability is traced across multiple\nfunctions, giving deeper insight into the best practices we should put in\nplace to not only resolve the vulnerability, but prevent similar\nvulnerabilities in the future.\n\n\n## Use GitLab Duo Vulnerability Explanation\n\n\nGitLab Duo can help you mitigate or remediate a vulnerability by using a\nlarge language model to:\n\n\n- Summarize the vulnerability\n\n- Help developers and security analysts understand the vulnerability\n\n- Show how the vulnerability can be exploited\n\n- Provide a suggested remediation or mitigation\n\n\nTo use Vulnerability Explanation, the following is required:\n\n\n- GitLab Ultimate subscription\n\n- GitLab Duo Enterprise seat\n\n- GitLab Duo must be enabled for your group or instance\n\n\nFrom the vulnerability report, you can select a SAST vulnerability and go to\nits Vulnerability page. 
From the Vulnerability page, you can do any of the\nfollowing to explain the vulnerability:\n\n\n- Select the text below the vulnerability description\n\n- You can use AI by asking GitLab Duo Chat to explain this vulnerability and\noffer a suggested fix.\n\n- In the upper right, from the \"Resolve with merge request\" dropdown list,\nselect **Explain Vulnerability**, then select **Explain vulnerability**.\n\n- Open GitLab Duo Chat and use the explain a vulnerability command:\n`/vulnerability_explain`.\n\n\nThen the vulnerable code will be processed by Anthropic’s Claude 3 Haiku\nmodel and provide the following data:\n\n\n![GitLab Duo Vulnerability\nExplanation](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098467/Blog/Content%20Images/Blog/Content%20Images/vuln_explain_2_aHR0cHM6_1750098466640.png)\n\n\n## Putting it all together\n\n\nNow, let's put it all together with a concrete example. I will use the\n[OWASP Juice Shop](https://owasp.org/www-project-juice-shop/) as my demo\napplication and run GitLab Advanced SAST to detect a vulnerability in\nproduction. Then I will use the vulnerability code flow and GitLab Duo to\ninvestigate vulnerability exploitation, and remediation. 
You can [follow\nalong with this\ndemo](https://gitlab.com/gitlab-da/tutorials/security-and-governance/owasp/juice-shop)\nand see this workflow in action by watching:\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/H1S43oM44k0?si=2LYorTjByOHbCAko\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\nThe detection and remediation workflow is as follows:\n\n\n- Enable GitLab Advanced SAST and run it on the project’s default branch.\n\n- Open the Vulnerability Report and sort by **Tool:GitLab Advanced SAST**.\n\n- Select the **Improper neutralization of special elements in data query\nlogic** vulnerability found in `Basket.ts`.\n\n- Use the vulnerability code flow to understand the vulnerable paths.\n\n- Run **Explain this vulnerability** to see exploit information.\n\n- Run the application locally to attempt exploitation.\n\n- Change vulnerability status to \"Confirmed\" and provide relevant info.\n\n- Determine remediation path using all relevant data:\n    - Vulnerability page insights, Code Flow, Vulnerability Explanation results\n- Create a new branch and apply remediation.\n\n- Run the remediated application locally and try to exploit again.\n\n- Create a merge request with the fix.\n\n- Code change will be tested using CI to assure we don’t break the\napplication.\n\n- Validate and merge MR.\n\n- Test exploit in deployed environment.\n\n- Change vulnerability status to \"Resolved\" on the Vulnerability page.\n\n\n**Note:** There are many ways to triage and remediate vulnerabilities, make\nsure to follow best practices set by your organization.\n\n\n# Useful links\n\n\nTo learn more about GitLab and how you can get started with enhancing your\norganization’s application security posture, check out the following\nresources.\n\n\n* [GitLab Ultimate](https://about.gitlab.com/pricing/ultimate/) \n\n* [GitLab 
Duo](https://about.gitlab.com/gitlab-duo/)  \n\n* [GitLab Security and Compliance\nSolutions](https://about.gitlab.com/solutions/security-compliance/)  \n\n* [GitLab Software Supply Chain Security\nSolutions](https://about.gitlab.com/solutions/supply-chain/)  \n\n* [GitLab Continuous Software\nCompliance](https://about.gitlab.com/solutions/continuous-software-compliance/)  \n\n* [JuiceShop Demo\nApplication](https://gitlab.com/gitlab-da/tutorials/security-and-governance/owasp/juice-shop)  \n\n* [GitLab AppSec\ndocumentation](https://docs.gitlab.com/ee/user/application_security/)  \n\n* [Advanced SAST \ndocumentation](https://docs.gitlab.com/ee/user/application_security/sast/gitlab_advanced_sast.html)  \n\n* [Explain this Vulnerability\ndocumentation](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/#explaining-a-vulnerability)  \n\n* [Code Flow\ndocumentation](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/#vulnerability-code-flow)  \n\n* [Security Policy\ndocumentation](https://docs.gitlab.com/ee/user/application_security/policies/) \n\n* [OWASP Juice Shop\ndocumentation](https://owasp.org/www-project-juice-shop/)\n",[803,693,719,695,9],{"slug":3376,"featured":91,"template":698},"quick-vulnerability-remediation-with-gitlab-advanced-sast-duo-ai","content:en-us:blog:quick-vulnerability-remediation-with-gitlab-advanced-sast-duo-ai.yml","Quick Vulnerability Remediation With Gitlab Advanced Sast Duo 
Ai","en-us/blog/quick-vulnerability-remediation-with-gitlab-advanced-sast-duo-ai.yml","en-us/blog/quick-vulnerability-remediation-with-gitlab-advanced-sast-duo-ai",{"_path":3382,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3383,"content":3389,"config":3394,"_id":3396,"_type":14,"title":3397,"_source":16,"_file":3398,"_stem":3399,"_extension":19},"/en-us/blog/quickly-resolve-broken-ci-cd-pipelines-with-ai",{"title":3384,"description":3385,"ogTitle":3384,"ogDescription":3385,"noIndex":6,"ogImage":3386,"ogUrl":3387,"ogSiteName":685,"ogType":686,"canonicalUrls":3387,"schema":3388},"Quickly resolve broken CI/CD pipelines with AI","When your CI/CD pipeline fails, it leads to delays, decreased productivity, and stress. AI-powered Root Cause Analysis makes problem-solving faster and smarter.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097355/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%2811%29_78Dav6FR9EGjhebHWuBVan_1750097355230.png","https://about.gitlab.com/blog/quickly-resolve-broken-ci-cd-pipelines-with-ai","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Quickly resolve broken CI/CD pipelines with AI\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Itzik Gan Baruch\"}],\n        \"datePublished\": \"2024-12-03\",\n      }",{"title":3384,"description":3385,"authors":3390,"heroImage":3386,"date":2178,"body":3392,"category":801,"tags":3393},[3391],"Itzik Gan Baruch","CI/CD pipelines are the backbone of efficiency in software development. They help teams test, build, and deploy code quickly. 
But when these pipelines break, everything slows down — deadlines get missed, and developers are left frustrated as they work to fix things and keep projects on track.\n\n![CI/CD pipeline with multiple failed jobs](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097362/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097362772.png)\n\n\u003Ccenter>\u003Ci>CI/CD pipeline with multiple failed jobs\u003C/i>\u003C/center>\u003Cbr>\u003C/br>\n\n**So, why do pipelines break in the first place?** Let’s break it down.\n\n## Reasons for pipeline failures\n\nA pipeline failure occurs when the automated workflow in your [CI/CD pipeline](https://about.gitlab.com/topics/ci-cd/cicd-pipeline/) — a series of steps that can include building, testing, and deploying code — does not execute as expected and ends with an error message. This failure can prevent code from being properly built, tested, or deployed, causing delays in software delivery and requiring troubleshooting to resolve. \n\nPipeline failures can happen for a variety of reasons. 
Some common causes include:\n- Syntax errors: A small mistake in the code, like a missing semicolon or incorrect variable name, can cause the pipeline to fail.\n- Failed tests: Unit or integration tests might fail due to broken code, incorrect configurations, or mismatched dependencies.\n- Misconfigurations: Incorrect pipeline settings or environment configurations can lead to failed builds or deployments.\n\nThere are also more complex issues that add to the challenge:\n- Infrastructure-as-Code ([IaC](https://about.gitlab.com/topics/gitops/infrastructure-as-code/)) issues: Problems in provisioning cloud infrastructure, such as errors in Terraform scripts or CloudFormation templates, can prevent a successful deployment.\n- Kubernetes and GitOps challenges: Misconfigurations in [Kubernetes clusters](https://about.gitlab.com/blog/kubernetes-the-container-orchestration-solution/) or issues with [GitOps](https://about.gitlab.com/topics/gitops/) workflows (e.g., syncing Kubernetes states with Git repositories) can cause pipeline failures that are difficult to diagnose.\n- Long, messy stack traces: When an error occurs deep in the system, stack traces can become long and hard to decipher, especially when they span multiple components or services.\n\nThese challenges make troubleshooting more difficult and time-consuming, as finding the root cause often involves sifting through complex logs, reviewing configuration files, and testing different solutions.\n\n## The real impact of failed pipelines\n\nWhen a pipeline fails, it doesn’t just delay your deployment — it brings stress and frustration. Developers are forced to pause their work and dive into troubleshooting, which often leads to a chain reaction of disruptions. This makes it harder to meet deadlines and increases the pressure on the entire team. But why is manual troubleshooting so stressful?\n\n### Manual troubleshooting \n\nThe time it takes to fix a broken pipeline varies. 
It depends on things like:\n- How well the developer knows the project\n- How experienced they are with similar issues\n- Their overall problem-solving skills\n\nManually digging through logs to figure out what went wrong is a tough and tedious process. Logs can come from all over the place, including application errors and system messages, and they’re often messy and hard to interpret. And on top of that, fixing the pipeline usually requires a lot of jumping back and forth between tasks, adding more time to the process.\n\nThis is where [GitLab Duo](https://about.gitlab.com/gitlab-duo/) comes in. GitLab Duo can sift through all that messy data and spot issues much faster, simplifying the process so you don’t need to be an expert to figure out what went wrong. With AI, fixing your pipelines becomes faster, easier, and much less stressful.\n\n\u003Cimg src=\"https://res.cloudinary.com/about-gitlab-com/image/upload/v1752176104/Blog/zxvvu7p9vc3qpmwl32ya.png\" alt=\"broken pipeline\">\n\n\u003Cimg src=\"https://res.cloudinary.com/about-gitlab-com/image/upload/v1752176108/Blog/bpx6dqilfhltzboyp8k8.png\" alt=\"fix suggestions for broken pipelines\">\n\n## GitLab Duo Root Cause Analysis with generative AI\n\nWhen your CI/CD pipeline breaks, you don’t have to spend hours manually troubleshooting. Enter [GitLab Duo’s Root Cause Analysis (RCA)](https://docs.gitlab.com/ee/user/gitlab_duo/#root-cause-analysis). This AI-powered tool quickly identifies the exact cause of the failure and suggests fixes — right within the DevSecOps platform. No matter how long or complicated your stack traces are, RCA analyzes all the data, breaks it down, and gives you clear, actionable insights.\n\n**It tells you exactly what caused the error, provides steps to fix it, and even pinpoints the specific files and lines of code that need attention.** And, to make it even easier, it suggests code fixes to get everything back on track. 
This makes troubleshooting a lot faster and more straightforward.\n\n\u003Cimg src=\"https://res.cloudinary.com/about-gitlab-com/image/upload/v1752176111/Blog/nmagby9hoksskogve53m.png\" alt=\"root cause of failure\">\n\n\u003Cimg src=\"https://res.cloudinary.com/about-gitlab-com/image/upload/v1752176115/Blog/dndis1cedwbmbnj33q3v.png\" alt=\"example fix\">\n\n## Keep the conversation going with follow-up questions\n\nWith GitLab Duo RCA, you don’t just get answers — you can ask follow-up questions to dig deeper. Want to explore alternative solutions? No problem. You can add [more context](https://docs.gitlab.com/ee/user/gitlab_duo_chat/index.html#the-context-chat-is-aware-of) by referencing other files, issues, or epics in your repo. For example, you could open your `.gitlab-ci.yml` file in the IDE and ask the chat, “Based on this file, and the analyzed CI/CD pipeline, how would you propose to optimize the pipeline?” \n\n## Privacy first – everything stays in GitLab\nOne of the key benefits of GitLab Duo RCA is that it works right out of the box within GitLab. You won’t have to switch tools or go hunting for external help. Plus, your [logs and sensitive data stay secure](https://about.gitlab.com/privacy/) - there’s no need to send them off to external AI solutions. RCA is seamlessly integrated within GitLab, offering valuable insights without ever compromising privacy.\n\n![broken pipelines - image 6](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097363/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097362773.png)\n\n## Get started today\n\nWant to see how AI can supercharge your development process, making it smoother and faster? Dive into our GitLab Duo Enterprise product tour below and discover how GitLab Duo’s AI-powered insights can transform every stage of your development journey — from planning and coding to troubleshooting and deployment. 
Click the image below to start the tour!\n\n[![GitLab Duo Enterprise tour](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097363/Blog/Content%20Images/Blog/Content%20Images/Screenshot_2024-12-02_at_12.41.10_PM_aHR0cHM6_1750097362774.png)](https://gitlab.navattic.com/duo-enterprise)\n\n> [Start a free trial of GitLab Duo today!](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/)",[803,9,719,695],{"slug":3395,"featured":6,"template":698},"quickly-resolve-broken-ci-cd-pipelines-with-ai","content:en-us:blog:quickly-resolve-broken-ci-cd-pipelines-with-ai.yml","Quickly Resolve Broken Ci Cd Pipelines With Ai","en-us/blog/quickly-resolve-broken-ci-cd-pipelines-with-ai.yml","en-us/blog/quickly-resolve-broken-ci-cd-pipelines-with-ai",{"_path":3401,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3402,"content":3408,"config":3414,"_id":3416,"_type":14,"title":3417,"_source":16,"_file":3418,"_stem":3419,"_extension":19},"/en-us/blog/rate-limitations-announced-for-projects-groups-and-users-apis",{"title":3403,"description":3404,"ogTitle":3403,"ogDescription":3404,"noIndex":6,"ogImage":3405,"ogUrl":3406,"ogSiteName":685,"ogType":686,"canonicalUrls":3406,"schema":3407},"Rate limitations announced for Projects, Groups, and Users APIs","Learn why these changes are being made, when they go into effect, and what to expect.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662504/Blog/Hero%20Images/devsecops-automated-security.jpg","https://about.gitlab.com/blog/rate-limitations-announced-for-projects-groups-and-users-apis","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Rate limitations announced for Projects, Groups, and Users APIs\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Christina Lohr\"}],\n        \"datePublished\": \"2024-05-14\",\n      
}",{"title":3403,"description":3404,"authors":3409,"heroImage":3405,"date":3411,"body":3412,"category":779,"tags":3413,"updatedDate":2704},[3410],"Christina Lohr","2024-05-14","In recent months, we have observed that the frequency and intensity of requests made by users to the Projects, Groups, and Users APIs have increased significantly. This has resulted in an increased load on our servers, which has impacted the performance and stability of our platform for all users. To address this issue, we have decided to introduce rate limitations for all users.\n\nIn the coming months, we will introduce rate limitations to the Projects, Groups and Users API according to the following schedule:\n\n__Groups and Projects API__\n- Brownouts\n  - Feb 4, 2025: For 12 hours starting at 12:00 UTC\n  - Mar 4, 2025: For 24 hours starting at 12:00 UTC\n- Permanent rollout\n  - Apr 2, 2025 at 12:00 UTC\n\n__Users API__\n- Brownouts\n  - Feb 24, 2025: For 12 hours starting at 15:00 UTC\n  - Feb 27, 2025: For 24 hours starting at 15:00 UTC\n- Permanent rollout\n  - Mar 3, 2025 at 12:00 UTC\n\nTo protect our Dedicated and Self-managed customers from unexpected changes, we're implementing a careful rollout strategy for our new API rate limitations. We'll be using a feature flag that allows us to apply these limits exclusively to GitLab.com while keeping them disabled by default for Self-managed and Dedicated instances.\n\nWhen upgrading to 18.0, these rate limits will be set to zero and the feature flag will be disabled by default, ensuring no disruption to your current workflows. This approach gives administrators complete control - you can choose to enable these limits at your convenience by toggling the feature flag and setting the appropriate rate limits for your installation.\nIn GitLab 18.1, the feature flag will be removed. 
It means the rate limits you've defined before that (unlimited by default) will automatically be used, even if you didn't enable the feature flag.\n\nFrom GitLab 18.3, new installations will have the default rate limits (see \"Rate limitation details\" below) applied.\n\n## What are the Projects, Groups, and Users APIs?\n\nThe Projects and Groups APIs provide information about GitLab projects and groups, including name, description, and other metadata. The Users API provides information about GitLab users. These APIs are widely used by our community, including researchers, developers, and integrators, to retrieve and analyze information about GitLab projects, groups and users. We value this usage and aim to support it as much as possible.\n\n## Rate limitation details\n\nAll users of the Projects, Groups, and Users APIs will be rate-limited. The limits per endpoint are as follows:\n\n| Endpoint    | Rate limit per user or IP     |\n| ---------- | ---------- |\n|GET /api/v4/users/:id/followers | 100 requests per minute |\n|GET /api/v4/users/:id/following | 100 requests per minute |\n| GET /api/v4/users/:user_id/status | 240 requests per minute |\n| GET /api/v4/users/:user_id/keys | 120 requests per minute |\n| GET /api/v4/users/:id/keys/:key_id | 120 requests per minute |\n| GET /api/v4/users/:id/gpg_keys | 120 requests per minute |\n| GET /api/v4/users/:id/gpg_keys/:key_id | 120 requests per minute |\n| GET /api/v4/users/:user_id/projects | 300 requests per minute |\n| GET /api/v4/users/:user_id/contributed_projects | 100 requests per minute |\n| GET /api/v4/users/:user_id/starred_projects | 100 requests per minute |\n| GET /api/v4/projects | 2000 requests per 10 minutes |\n| GET /api/v4/groups/:id/projects | 600 requests per minute |\n| GET /api/v4/projects/:id | 400 requests per minute |\n| GET /api/v4/groups | 200 requests per minute |\n| GET /api/v4/groups/:id | 400 requests per minute |\n\u003Cp>\u003C/p>\n\nWe rate-limit based on user for authenticated 
requests and based on IP for unauthenticated requests. We use the same value for both IP and user. The stated limits have been selected based on average usage patterns and should provide sufficient headroom for most use cases.\n\nIf a user exceeds this limit, the user will receive a \"429 Too Many Requests\" response. On GitLab.com, this limit cannot be changed. Users of GitLab Self-managed instances have the same rate limitation set by default, but admins can change the rate limits as they see fit via the UI or the application settings API. They can also set the rate limit to zero, which acts as if there is no rate limitation at all.\n\nWe understand that this change may impact some of our users who rely on the Projects, Groups, and Users APIs, and we apologize for any inconvenience this may cause.\n\nIf you have any questions or concerns about this change, please leave feedback in [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/458904).\n\n#### Note\nThis post has been updated recently. 
These are the changes that were made:\n* Added detailed rollout strategy for Self-managed/Dedicated: Introduces a phased implementation with feature flags that initially apply limits only to GitLab.com, with zero rate limits by default for Self-managed/Dedicated in GitLab 18.0.\n* Clarified version timeline: Added specifics about feature flag removal in GitLab 18.1 and default rate limits for new installations starting in 18.3.",[9,717],{"slug":3415,"featured":6,"template":698},"rate-limitations-announced-for-projects-groups-and-users-apis","content:en-us:blog:rate-limitations-announced-for-projects-groups-and-users-apis.yml","Rate Limitations Announced For Projects Groups And Users Apis","en-us/blog/rate-limitations-announced-for-projects-groups-and-users-apis.yml","en-us/blog/rate-limitations-announced-for-projects-groups-and-users-apis",{"_path":3421,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3422,"content":3428,"config":3433,"_id":3435,"_type":14,"title":3436,"_source":16,"_file":3437,"_stem":3438,"_extension":19},"/en-us/blog/reduce-supply-chain-risk-with-smarter-vulnerability-prioritization",{"title":3423,"description":3424,"ogTitle":3423,"ogDescription":3424,"noIndex":6,"ogImage":3425,"ogUrl":3426,"ogSiteName":685,"ogType":686,"canonicalUrls":3426,"schema":3427},"Reduce supply chain risk with smarter vulnerability prioritization","New software composition analysis features use risk-based intelligence so developers and security teams can prioritize critical vulnerabilities for targeted remediation.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674528/Blog/Hero%20Images/blog-image-template-1800x945__5_.png","https://about.gitlab.com/blog/reduce-supply-chain-risk-with-smarter-vulnerability-prioritization","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Reduce supply chain risk with smarter vulnerability prioritization\",\n        \"author\": 
[{\"@type\":\"Person\",\"name\":\"Salman Ladha\"}],\n        \"datePublished\": \"2025-01-07\",\n      }",{"title":3423,"description":3424,"authors":3429,"heroImage":3425,"date":3430,"body":3431,"category":693,"tags":3432},[2864],"2025-01-07","Application Security teams face a constant uphill battle in risk reduction due to the ever-growing number of vulnerabilities. This year alone, [36,000 Common Vulnerabilities and Exposures (CVEs)](https://www.cvedetails.com/) have been reported — a 25% increase from last year. The sharp rise intensifies the challenge of prioritization in vulnerability management, especially for lean AppSec teams. \n\nTo help, we’ve introduced several new enhancements to our Software Composition Analysis (SCA) solution. These improvements are available for all GitLab Ultimate customers:  \n\n* **Static Reachability Analysis** identifies the *exploitable* vulnerabilities from open source components in your applications.   \n* **Known Exploited Vulnerabilities** (KEV) **Indicator** highlights known, actively exploited vulnerabilities.   \n* **Exploit Prediction Scoring System** (EPSS) predicts the likelihood of a vulnerability being exploited.\n\nBy prioritizing exploitable vulnerabilities, AppSec teams can reduce triage times, accelerate remediation cycles, and improve collaboration with their development counterparts. Powered by our recent acquisitions of [Oxeye](https://about.gitlab.com/blog/oxeye-joins-gitlab-to-advance-application-security-capabilities/) and [Rezilion's intellectual property](https://ir.gitlab.com/news/news-details/2024/GitLab-Reports-First-Quarter-Fiscal-Year-2025-Financial-Results/default.aspx), these new capabilities align with our vision of providing best-in-class application security solutions, natively built into developer workflows. \n\n### What is SCA and why does it matter? \n\nSoftware Composition Analysis helps organizations identify and manage open source components within their applications. 
By scanning the codebase, SCA provides insights into the component versions, licenses, and importantly, known vulnerabilities. With [90% of Fortune 500](https://www.nber.org/be/20241/open-source-software-creators-its-not-just-about-money) companies dependent on open source components for their applications, SCA provides much-needed visibility to mitigate software supply chain risk. \n\nHigh-profile breaches like [SolarWinds](https://www.wired.com/story/the-untold-story-of-solarwinds-the-boldest-supply-chain-hack-ever/) and [Log4Shell](https://www.ncsc.gov.uk/information/log4j-vulnerability-what-everyone-needs-to-know) highlight how vulnerabilities in third-party components can compromise countless downstream applications. SCA tools act as proactive measures, enabling teams to identify vulnerabilities and enforce compliance early in the software development lifecycle, ensuring software security while maintaining development velocity. \n\n### Filter out the noise for targeted remediation \n\nWith our latest SCA enhancements, GitLab helps you cut through the noise to prioritize real risks, reduce backlogs, and remediate faster – all within your existing workflows. \n\n**Focus on vulnerabilities that pose the greatest risk** \n\n* Static Reachability Analysis leverages the proprietary detection engine of our [Advanced SAST](https://about.gitlab.com/blog/gitlab-advanced-sast-is-now-generally-available/) solution to surface vulnerabilities from dependencies that can *actually* be exploited in your application. \n\n**Reduce triage times** \n\n* With KEV indicators and EPSS scoring, GitLab gives security teams actionable insights into vulnerabilities that are actively being exploited or likely to be targeted. Incorporating risk-based scoring helps teams effectively triage their vulnerability backlog. 
\n\n**Faster remediation to mitigate supply chain risk** \n\n* Our SCA enhancements are built into developer workflows, providing contextual remediation guidance while maintaining developer productivity. \n\n### What’s next for SCA \n\nWe’re continuing to integrate Rezilion’s technology into our platform to help teams secure their software supply chains more effectively. Rezilion will be key to powering future innovations, including:\n\n* **Supporting faster remediation** workflows by automatically opening merge requests with fixes for detected vulnerabilities   \n* **Enriching package metadata** using [OpenSSF scorecard ratings](https://openssf.org/projects/scorecard/) to provide security teams with more information on dependencies such as authors and end-of-life status   \n* **Improving open-source software license detection** to ensure compliance and reduce legal risks \n\n### Get started with SCA \n\nIf you’re an existing GitLab Ultimate customer and would like to learn more about how Software Composition Analysis can enhance your application security program, visit our [documentation](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/). There, you’ll find details on implementation requirements, use cases, and more. Or if you’re not yet a GitLab Ultimate customer, get started with a [free trial](https://about.gitlab.com/free-trial/) today to explore how GitLab enhances your ability to write secure software, achieve compliance goals, and improve development velocity. \n\n##### ***Disclaimer**: This blog contains information related to upcoming products, features, and functionality. It is important to note that the information in this blog post is for informational purposes only. Please do not rely on this information for purchasing or planning purposes. As with all projects, the items mentioned in this blog and linked pages are subject to change or delay. 
\"datePublished\": \"2025-05-22\"\n      }
Yet, migrating existing JavaScript\ncodebases to TypeScript is often a time-consuming process. Enter [GitLab Duo\nWorkflow](https://about.gitlab.com/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai/):\nsecure, agentic AI that sits right inside your development environment,\nhelping transform high-level tasks into executable workflows. In this\narticle, you'll learn how we used Duo Workflow to update Duo Workflow,\nconverting a real-world JavaScript application to TypeScript. We'll also\nreview the technical process and broader implications for development\nworkflows.\n\n\nThis video walks through visually what you'll read below:\n\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe\nsrc=\"https://player.vimeo.com/video/1085078036?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\"\nframeborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture;\nclipboard-write; encrypted-media\"\nstyle=\"position:absolute;top:0;left:0;width:100%;height:100%;\"\ntitle=\"Refactor JavaScript to TypeScript with GitLab Duo\nWorkflow\">\u003C/iframe>\u003C/div>\u003Cscript\nsrc=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n\n## The challenge: Refactor JS to TS\n\n\nWe decided to migrate Duo Workflow client-related logic to TypeScript for\nbetter type safety and auto-complete. A JavaScript-to-TypeScript migration\ninvolves more than just changing file extensions. It requires:\n\n\n1. Analyzing existing code patterns to determine appropriate types\n\n2. Handling edge cases where type inference is ambiguous\n\n3. Ensuring consistency across the codebase\n\n4. Managing dependencies and third-party libraries\n\n5. 
can be very time-consuming and
In our case, we started with the prompt:\n\n\n```unset\n\nI need to refactor a JavaScript file\npackages/webview_duo_workflow/src/app/stores/workflow.js to TypeScript.\nPlease analyze the code, identify any issues, and convert it to TypeScript\nwith appropriate type definitions.\n\n```\n\n\n### Analysis\n\n\nAfter prompting, Duo Workflow first analyzed the codebase structure:\n\n\n* Identified that `workflow.js` is a Pinia store implementation\n\n* Noted it uses imports from various modules including `constants.ts` (which\nwas already in TypeScript)\n\n* Recognized GraphQL queries import exists but the file wasn't accessible\n\n\nBased on this analysis, Duo Workflow identified specific typing needs:\n\n\n**Type definitions needed for:**\n\n\n* Store State interface with precise types:\n  * `areWorkflowsLoading`: boolean\n  * `isLoadingWorkflow`: boolean\n  * `isLoadingRecentWorkflows`: boolean\n  * `recentWorkflows`: array\n  * `workflows`: array\n  * `workflowsPageInfo`: object\n  * `initialState`: object\n  * `activeWorkflow`: object with specific shape\n\n**Interface definitions required:**\n\n\n* Workflow interface with properties:\n  * `id`: string\n  * `goal`: string\n  * `status`: string\n  * `checkpoint`: object\n  * `type`: string\n\n**Parameter types needed for:**\n\n\n* GraphQL response types for workflows data\n\n* Event handler parameter types\n\n* Function parameter types for actions like `getWorkflowById`,\n`runWorkflow`, etc.\n\n\nDuo Workflow identified several complex patterns requiring special handling:\n\n\n* Uses optional chaining extensively\n\n* Has nested object destructuring\n\n* Contains GraphQL integration\n\n* Uses Pinia store patterns with state, getters, and actions\n\n* Implements an events mapping system\n\n\nThe system recognized external type dependencies:\n\n\n* Pinia store types\n\n* GraphQL query response types\n\n* Functions from an internal Bridge module were also properly recognized\n(for `sendRequest`, 
prompted to “Approve plan.”
suggestion-only assistance\n\n\nUnlike many AI assistants that simply offer code snippets or suggestions,\nDuo Workflow understands and executes complete tasks. The difference is\nsignificant — rather than saying \"here's some TypeScript code you might\nuse,\" it says \"I'll convert this file for you, here's my plan, and here are\nthe changes I'm making.\"\n\n\n### Contextual understanding of the entire codebase\n\n\nThe tool demonstrates awareness of project structure, related files (like\nconstants.ts and GraphQL queries), and the relationships between components.\nThis contextual understanding allows for more sophisticated conversions than\nlocalized transformations.\n\n\n### Step-by-step execution with visibility\n\n\nThe plan-based approach, with clear steps and progress indicators, provides\ntransparency into what would otherwise be a black-box process. This allows\ndevelopers to understand what the AI is doing and how it's approaching the\nproblem.\n\n\n> GitLab Duo Workflow is currently available in private beta for GitLab\nUltimate customers. 
[Sign up for the waitlist\ntoday!](https://about.gitlab.com/gitlab-duo/agent-platform/)\n\n\n## Learn more\n\n\n- [Agentic AI guides and\nresources](https://about.gitlab.com/blog/agentic-ai-guides-and-resources/)\n\n- [GitLab Duo\nWorkflow](https://about.gitlab.com/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai/)\n\n- [What is agentic AI?](https://about.gitlab.com/topics/agentic-ai/)\n",[803,719,9,695],{"slug":3452,"featured":91,"template":698},"refactoring-javascript-to-typescript-with-gitlab-duo-workflow","content:en-us:blog:refactoring-javascript-to-typescript-with-gitlab-duo-workflow.yml","Refactoring Javascript To Typescript With Gitlab Duo Workflow","en-us/blog/refactoring-javascript-to-typescript-with-gitlab-duo-workflow.yml","en-us/blog/refactoring-javascript-to-typescript-with-gitlab-duo-workflow",{"_path":3458,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3459,"content":3464,"config":3469,"_id":3471,"_type":14,"title":3472,"_source":16,"_file":3473,"_stem":3474,"_extension":19},"/en-us/blog/rise-of-protestware",{"title":3460,"description":3461,"ogTitle":3460,"ogDescription":3461,"noIndex":6,"ogImage":1532,"ogUrl":3462,"ogSiteName":685,"ogType":686,"canonicalUrls":3462,"schema":3463},"Protestware threats: How to protect your software supply chain","Some people protest for change by changing code others depend on throughout the software supply chain. 
\"datePublished\": \"2023-05-09\"\n      }
Similar code injection\nvariants like\n[typosquatting](https://www.kaspersky.com/resource-center/definitions/what-is-typosquatting)\npackages (as in the case of the [colors\nnpm](https://www.mend.io/resources/blog/new-typosquating-attack-on-npm-package-colors-using-cross-language-technique-explained/)\npackage, where bad actors created compromised clones of packages with\nsimilar names) and compromised packages (as in the case of the [ctx PyPI\npackages](https://www.theregister.com/2022/05/24/pypi_ctx_package_compromised/))\nare usually perpetrated by bad actors looking to cause harm. Protestware is\nunusual in that the custodians of projects trusted by the community have\nallowed or made these changes. Regardless of whether the changes' impacts\nare harmful, such changes raise ethical concerns and can create unwanted\ndistractions. These risks also reinforce the need for open source consumers\nto adopt a [zero trust security\nmodel](/blog/why-devops-and-zero-trust-go-together/) for their\nsoftware supply chain. Trust, but verify!\n\n\nThe world is going through unprecedented movements demanding change, and\nchange seekers will find new and often disruptive ways to be heard, as we\nhave seen in the case of everything from climate activism to TikTok\nchallenges. Software supply chains are not exempt and, as we have learned\nfrom past incidents, being proactive is key to staying secure.\n\n\nHere are some steps you can take to protect your software supply chain by\nensuring your dependencies are secure.\n\n\n## Implement dependency scanning\n\n\n[Dependency\nscanning](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/)\nis now an industry standard, and there is no shortage of tools or libraries\nto scan your packages, containers, or any other binary formats for\nvulnerabilities. 
Using GitLab CI’s\n[`rules:exists`](https://docs.gitlab.com/ee/ci/yaml/#rulesexists) rule,\nGitLab checks for the presence of certain files to determine the appropriate\nscans to check for vulnerabilities. Coupled with [Vulnerability\nReports](https://docs.gitlab.com/ee/user/application_security/vulnerability_report/),\n[Policy\nManagement](https://docs.gitlab.com/ee/user/application_security/policies/index.html#policy-management),\nand the [Security\nDashboard](https://docs.gitlab.com/ee/user/application_security/security_dashboard/index.html),\nyour security team and organization can stay ahead of vulnerabilities. To\ninclude dependency scanning in your CI pipeline, add the following lines to\nyour `.gitlab-ci.yml` file. You can explore the [Dependency Scanning CI\ntemplate](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.gitlab-ci.yml)\nto understand how it works. \n\n\n```\n\n\ninclude:\n\n  template: Jobs/Dependency-Scanning.gitlab-ci.yml\n\n```\n\n\nRunning the CI script against an example [Ruby on Rails\nproject](https://gitlab.com/gitlab-de/playground/ruby-rails-demo) with Ruby\n3.0.4, the [Vulnerability\nReport](https://gitlab.com/gitlab-de/playground/ruby-rails-demo/-/security/vulnerability_report/?scanner=GitLab.DEPENDENCY_SCANNING)\nshows more than 70 vulnerabilities detected for the dependencies in the\nproject’s\n[Gemfile](https://gitlab.com/gitlab-de/playground/ruby-rails-demo/-/blob/master/Gemfile).\n\n\n\n![Vulnerability Report\nImage](https://about.gitlab.com/images/blogimages/2023-04-rise-of-protestware/vulnerability-report.png\n\"Vulnerability Report Image\")\n\n\n\n## Generate provenance validations\n\n\nUsers of packages can verify they are not downloading a compromised version\nusing [artifact\nattestation](https://docs.gitlab.com/ee/ci/runners/configure_runners.html#artifact-attestation),\nwhich was [introduced in GitLab 
Runner\n15.1](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28940/).\nAttestation metadata is generated in the [in-toto\nformat](https://github.com/in-toto/attestation); it provides\n[provenance](https://slsa.dev/provenance/v0.2) attesting to how a binary was\nbuilt, and you can verify the artifacts against the provenance. This allows\nyou to achieve [Level\n2](/blog/achieve-slsa-level-2-compliance-with-gitlab/) of the\nSupply-chain Levels for Software Artifacts ([SLSA](https://slsa.dev/))\nsecurity framework. \n\n\nThe demo video below shows how to configure your CI script to generate\nartifact attestation metadata.\n\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/MlIdqrDgI8U\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\n\n## Utilize private registries\n\n\n[Self-hosting registries](https://docs.gitlab.com/ee/user/packages/) for\npackages, container images, or your Terraform modules are a more secure way\nof ensuring secure and vetted packages are used by your team. Security and\ncompliance teams are enabled to ensure total control of the dependencies\nused in the entire organization and how they are accessed with [package\nregistry\npermissions](https://docs.gitlab.com/ee/user/packages/package_registry/index.html#package-registry-visibility-permissions).\nGitLab supports container, infrastructure, and package registries. Package\nregistries supported include Composer (PHP), Conan (C/C++), Generic, Maven\n(Java), npm (NodeJS), NuGet (Windows packaging), PyPI (Python), and RubyGems\n(Ruby).\n\n\n## Enable Dependency Proxy\n\nThe [Dependency\nProxy](https://docs.gitlab.com/ee/user/packages/dependency_proxy/index.html)\nreduces the number of requests made to upstream dependency registries by\nacting as a local proxy. This reduces the impact of changes or\nvulnerabilities in the upstream packages, as a clean version will still be\nstored in the Dependency Proxy’s cache. 
per IP address for anonymous users
\"datePublished\": \"2025-04-08\"\n      }
strong support for SAFe. In this article, you'll learn
When integrated in GitLab, the magic really happens: visibility, traceability, and delivery all live in one place.\n\n## SAFe terminology in GitLab\n\nFirst, let's establish how SAFe concepts map to GitLab:\n\n| SAFe | GitLab |\n| :---- | :---- |\n| Epic | Top-level Epic |\n| Capability | Sub-epic (Level 1) |\n| Feature | Sub-epic (Level 2) |\n| User Story | Issue |\n| Task | Task |\n| Team | Custom Field / Scoped Label |\n| Sprint | Iteration |\n| Program Increment (PI) | Milestone |\n| Value Stream | Top-level Group |\n| Agile Release Train (ART) | Top-level Group |\n\n\u003Cbr>\u003C/br>\n\nWith this mapping as your guide, you can set up GitLab to mirror your SAFe implementation. The group structure lets you organize around your value streams and ARTs, while the work item hierarchy (with up to seven levels of nested epics!) gives you all the depth you need for complex product portfolios. Whether you're working at the portfolio level (with top-level groups), program level (with subgroups), or team level (with projects), GitLab's organizational structure aligns perfectly with SAFe's hierarchy.\n\n## Supporting SAFe ceremonies in GitLab\n\nNow for the fun part - how do you actually run your SAFe ceremonies in GitLab? Let's walk through each one.\n\n### PI planning\n\nTo facilitate the cross-team alignment and dependency management that makes PI planning successful, GitLab offers several capabilities:\n\n* Use the [Roadmap](https://docs.gitlab.com/user/group/roadmap/) view to visualize features across teams and time periods\n* Assign features to the PI [milestone](https://docs.gitlab.com/user/project/milestones/)\n* Document and visualize cross-team [dependencies](https://docs.gitlab.com/user/project/issues/related_issues/#blocking-issues) as they're identified\n\nGitLab gives you flexibility for PI planning through both the Epic boards (which can be configured to show team assignments) and the Roadmap view (which shows features over time like a Gantt chart). 
You can switch between these views during your planning session depending on whether you're focusing on the timeline or team organization.\n\n![Roadmap view and epic board](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097577/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097576746.gif)\n\n\u003Cbr>\u003C/br>\n\n![Roadmap view with Gantt chart](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097577/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750097576747.png)\n\n### Refinement\n\nAs a product manager, running effective refinement sessions means having clear visibility into your feature backlog. You can run your refinement session right inside GitLab. No more updating one tool during the meeting and then having to update another tool afterward.\n\nGitLab powers refinement sessions with:\n\n* [Epic boards](https://docs.gitlab.com/user/group/epics/epic_boards/) that group features based on status\n* The ability to view story points directly in the [overview](https://docs.gitlab.com/user/group/epics/epic_boards/#view-count-of-issues-weight-and-progress-of-an-epic)\n* Comprehensive [drawer views](https://docs.gitlab.com/user/group/epics/manage_epics/#open-epics-in-a-drawer) that let you interact with work items without losing context\n* The ability to create and link [child issues](https://docs.gitlab.com/user/group/epics/manage_epics/#add-an-issue-to-an-epic) directly from epics\n\n![SAFe - image 3](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097577/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097576749.gif)\n\n### Sprint planning\n\nWhen it's time to figure out what your team can tackle in the next sprint, GitLab gives you:\n\n* [Issue boards](https://docs.gitlab.com/user/project/issue_board/) that provide a comprehensive view of your backlog\n* [Total weight](https://docs.gitlab.com/user/project/issue_board/#sum-of-issue-weights) of user stories displayed directly on 
boards\n* The ability to easily move issues between iterations\n* A collapsible view that simplifies moving stories between sprints\n\nThis means you can keep everything in one place and spend your planning meetings actually planning instead of jumping between tools.\n\n![Sprint planning with GitLab](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097577/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750097576751.gif)\n\n*💡 Check out [this tutorial on using GitLab to facilitate Scrum](https://docs.gitlab.com/tutorials/scrum_events/) for a detailed glimpse into the power of GitLab in Agile planning and sprint tracking.*\n\n### Daily stand-ups\n\nYour team can gather around the board during daily stand-ups and actually see what everyone's working on, what's stuck, and what's ready for review – all in one view. For your dev team's daily stand-ups, GitLab lets you:\n\n* Create [iteration-scoped](https://docs.gitlab.com/user/project/issue_board/#iteration-lists) boards that show the current sprint's work\n* Display story points/weights directly on cards\n* Use the [drawer view](https://docs.gitlab.com/user/project/issues/managing_issues/#open-issues-in-a-drawer) to access details without leaving the context\n* Highlight tasks at risk through [health status](https://docs.gitlab.com/user/project/issues/managing_issues/#health-status)\n\n![Daily stand-up board](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097577/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750097576755.png)\n\n### Sprint review\n\nWant to know how your team is doing over time? 
GitLab provides comprehensive metrics with:\n\n* [Burndown and burnup charts](https://docs.gitlab.com/user/group/iterations/#iteration-burndown-and-burnup-charts) for iterations\n* Velocity tracking\n* [Lead and cycle time](https://docs.gitlab.com/user/group/value_stream_analytics/#lifecycle-metrics) metrics\n* Dashboards that can be scoped to teams\n\nThese metrics help you understand if your team is getting faster, where they're getting stuck, and what you might want to talk about in your next retrospective.\n\n![Burndown and burnup charts](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097577/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097576758.png)\n\n## 5 reasons a unified platform provides an advantage\n\nI know there are plenty of planning tools that can handle SAFe ceremonies. But there are game-changing reasons why I genuinely believe GitLab is different:\n\n1. **No more context switching** - Your planning, coding, testing, and security all happen in one place.\n2. **Everything's connected** - You can trace work from the big epic down to the code and deployment.\n3. **Everyone's on the same page** - Developers, product folks, and security teams all work together in the same tool.\n4. **Total visibility** - Stakeholders have one place to check for updates.\n5. **The full picture** - You see planning and development metrics together, so you know what's really going on.\n\nIf your dev teams already love GitLab, why make them jump to another tool for planning or create some complex, cobbled-together integrations? 
Bringing your SAFe planning into GitLab creates a much smoother experience for everyone.\n\n## Implementation principles\n\nI've worked with teams transitioning from traditional SAFe tools to GitLab, and here's what I've learned: Focus on **what each ceremony is trying to accomplish**, not on recreating exact replicas of your old tools.\n\nThe teams that get the most out of GitLab are the ones who embrace its native capabilities instead of fighting against them. Yes, it takes some initial work to figure out how to map your SAFe concepts and set up your workflows. But once you do, you'll find your processes actually get simpler rather than more complex.\n\nThe key is defining conventions that everyone follows. Which labels mean what? How will you track teams? What goes in an epic versus an issue? With a little upfront investment in these decisions, you'll end up with an intuitive system that eliminates all that cross-tool coordination overhead.\n\n## Getting started\n\nReady to give this a shot? Here's how to start implementing SAFe in GitLab:\n\n1. **Set up your structure** - Create groups and subgroups that [match your organization](https://about.gitlab.com/blog/best-practices-to-set-up-organizational-hierarchies-that-scale/).\n2. **Define your work breakdown** - Decide how you'll use [epics](https://about.gitlab.com/blog/unlocking-agile-excellence-gitlab-epics-for-seamless-portfolio-management/), [issues](https://docs.gitlab.com/user/project/issues/managing_issues/), and [tasks](https://docs.gitlab.com/user/tasks/).\n3. **Create your iterations** - Set up your [sprint schedule](https://docs.gitlab.com/user/group/iterations/#create-an-iteration-cadence).\n4. **Add your milestones** - [Milestones](https://docs.gitlab.com/user/project/milestones/#create-a-milestone) will represent your Program Increments in GitLab.\n5. **Build your boards** - Create different views for different ceremonies.\n6. 
**Agree on conventions** - Document how you'll use labels and custom fields.\n\nTaking time to think through these decisions upfront will save you many headaches later. And remember, you don't have to perfect it on day one - you can always adjust as you learn.\n\n## Bringing it all together\n\nGitLab gives you a solid foundation for running SAFe, especially if your dev teams are already GitLab fans. When you bring planning and development into the same tool, you eliminate those painful handoffs, make collaboration way easier, and get everything moving faster.\n\nThe beauty of GitLab's planning tools is that they're flexible enough to adapt to your specific flavor of SAFe. You're not locked into rigid workflows - you can evolve your approach as your teams mature and your needs change.\n\n> Ready to see how much better life is without those planning silos? [Start your free trial today](https://about.gitlab.com/free-trial/) and experience firsthand how GitLab can transform your SAFe implementation.\n\n*💡 If you liked this topic check out this related post - [GitLab for Agile Software Development](https://about.gitlab.com/blog/gitlab-for-agile-software-development/)*\n",[908,9,695,717,719],{"slug":3489,"featured":91,"template":698},"safe-without-silos-in-gitlab","content:en-us:blog:safe-without-silos-in-gitlab.yml","Safe Without Silos In Gitlab","en-us/blog/safe-without-silos-in-gitlab.yml","en-us/blog/safe-without-silos-in-gitlab",{"_path":3495,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3496,"content":3502,"config":3506,"_id":3508,"_type":14,"title":3509,"_source":16,"_file":3510,"_stem":3511,"_extension":19},"/en-us/blog/secure-and-publish-python-packages-a-guide-to-ci-integration",{"title":3497,"description":3498,"ogTitle":3497,"ogDescription":3498,"noIndex":6,"ogImage":3499,"ogUrl":3500,"ogSiteName":685,"ogType":686,"canonicalUrls":3500,"schema":3501},"Secure and publish Python packages: A guide to CI integration","Learn how to implement a secure CI/CD 
pipeline across five stages with the GitLab DevSecOps platform.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662080/Blog/Hero%20Images/AdobeStock_1097303277.jpg","https://about.gitlab.com/blog/secure-and-publish-python-packages-a-guide-to-ci-integration","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Secure and publish Python packages: A guide to CI integration\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Tim Rizzi\"}],\n        \"datePublished\": \"2025-01-21\"\n      }",{"title":3497,"description":3498,"authors":3503,"heroImage":3499,"date":2884,"body":3504,"category":693,"tags":3505},[925],"Supply chain security is a critical concern in software development.\nOrganizations need to verify the authenticity and integrity of their\nsoftware packages. This guide will show you how to implement a secure CI/CD\npipeline for Python packages using GitLab CI, incorporating package signing\nand attestation using Sigstore's Cosign.\n\n\nYou'll learn:\n\n\n- [Why sign and attest your Python\npackages?](#why-sign-and-attest-your-python-packages%3F)\n\n- [Pipeline overview](#pipeline-overview)\n\n- [Complete pipeline implementation: Setting up the\nenvironment](#complete-pipeline-implementation-setting-up-the-environment)\n   * [Environment configuration](#environment-configuration)\n   * [Configuration breakdown](#configuration-breakdown)\n-  The 6 stages\n\n    1. [Building](#building-crafting-the-package)\n    2. [Signing](#signing-the-digital-notarization)\n    3. [Verification](#verification-the-security-checkpoint)\n    4. [Publishing](#publishing-the-controlled-release)\n    5. [Publishing signatures](#publishing-signatures-making-verification-possible)\n    6. 
[Consumer verification](#consumer-verification-testing-the-user-experience)\n\n## Why sign and attest your Python packages?\n\n\nHere are four reasons to sign and attest your Python packages:\n\n\n* **Supply chain security:** Package signing ensures that the code hasn't\nbeen tampered with between build and deployment, protecting against supply\nchain attacks.\n\n* **Compliance requirements:** Many organizations, especially in regulated\nindustries, require cryptographic signatures and provenance information for\nall deployed software.\n\n* **Traceability:** Attestations provide a verifiable record of build\nconditions, including who built the package and under what circumstances.\n\n* **Trust verification:** Consumers of your package can cryptographically\nverify its authenticity before installation.\n\n\n## Pipeline overview\n\n\nEnsuring your code's integrity and authenticity is necessary. Imagine a\npipeline that doesn't just compile your code but creates a cryptographically\nverifiable narrative of how, when, and by whom your package was created.\nEach stage acts as a guardian, checking and documenting the package's\nprovenance.\n\n\nHere are six stages of a GitLab pipeline that ensure your package is secure\nand trustworthy:\n\n\n* Build: Creates a clean, standard package that can be easily shared and\ninstalled.\n\n* Signing: Adds a digital signature that proves the package hasn't been\ntampered with since it was created.\n\n* Verification: Double-checks that the signature is valid and the package\nmeets all our security requirements.\n\n* Publishing: Uploads the verified package to GitLab's package registry,\nmaking it available for others to use.\n\n* Publishing Signatures: Makes signatures available for verification.\n\n* Consumer Verification: Simulates how end users can verify package\nauthenticity.\n\n\n## Complete pipeline implementation: Setting up the environment\n\n\nBefore we build our package, we need to set up a consistent and secure 
build\nenvironment. This configuration ensures every package is created with the\nsame tools, settings, and security checks.\n\n\n### Environment configuration\n\n\nOur pipeline requires specific tools and settings to work correctly.\n\n\nPrimary configurations:\n\n\n* Python 3.10 for consistent builds\n\n* Cosign 2.2.3 for package signing\n\n* GitLab package registry integration\n\n* Hardcoded package version for reproducibility\n\n\n**Note about versioning:** We've chosen to use a hardcoded version\n(`\"1.0.0\"`) in this example rather than deriving it from git tags or\ncommits. This approach ensures complete reproducibility and makes the\npipeline behavior more predictable. In a production environment, you might\nwant to use semantic versioning based on git tags or another versioning\nstrategy that fits your release process.\n\n\nTool requirements:\n\n\n* Basic utilities: `curl`, `wget`\n\n* Cosign for cryptographic signing\n\n* Python packaging tools: `build`, `twine`, `setuptools`, `wheel`\n\n\n### Configuration breakdown\n\n\n```yaml\n\nvariables:\n  PYTHON_VERSION: '3.10'\n  PACKAGE_NAME: ${CI_PROJECT_NAME}\n  PACKAGE_VERSION: \"1.0.0\"\n  FULCIO_URL: 'https://fulcio.sigstore.dev'\n  REKOR_URL: 'https://rekor.sigstore.dev'\n  CERTIFICATE_IDENTITY: 'https://gitlab.com/${CI_PROJECT_PATH}//.gitlab-ci.yml@refs/heads/${CI_DEFAULT_BRANCH}'\n  CERTIFICATE_OIDC_ISSUER: 'https://gitlab.com'\n  PIP_CACHE_DIR: \"$CI_PROJECT_DIR/.pip-cache\"\n  COSIGN_YES: \"true\"\n  GENERIC_PACKAGE_BASE_URL: \"${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/${PACKAGE_NAME}/${PACKAGE_VERSION}\"\n```\n\n\nWe use caching to speed up subsequent builds:\n\n\n```yaml\n\ncache:\n  paths:\n    - ${PIP_CACHE_DIR}\n```\n\n\n## Building: Crafting the package\n\n\nEvery software journey begins with creation. 
In our pipeline, the build\nstage is where raw code transforms into a distributable package, ready to\ntravel across different Python environments.\n\n\nThe build process creates two standardized formats:\n\n\n* a wheel package (.whl) for quick, efficient installation\n\n* a source distribution (.tar.gz) that carries the complete code\n\n\nHere's the build stage implementation:\n\n\n```yaml\n\nbuild:\n  extends: .python-job\n  stage: build\n  script:\n    - git init\n    - git config --global init.defaultBranch main\n    - git config --global user.email \"ci@example.com\"\n    - git config --global user.name \"CI\"\n    - git add .\n    - git commit -m \"Initial commit\"\n    - export NORMALIZED_NAME=$(echo \"${CI_PROJECT_NAME}\" | tr '-' '_')\n    - sed -i \"s/name = \\\".*\\\"/name = \\\"${NORMALIZED_NAME}\\\"/\" pyproject.toml\n    - sed -i \"s|\\\"Homepage\\\" = \\\".*\\\"|\\\"Homepage\\\" = \\\"https://gitlab.com/${CI_PROJECT_PATH}\\\"|\" pyproject.toml\n    - python -m build\n  artifacts:\n    paths:\n      - dist/\n      - pyproject.toml\n```\n\n\nLet's break down what this build stage does:\n\n\n1. Initializes a Git repository (`git init`) and configures it with basic\nsettings\n\n2. Normalizes the package name by converting hyphens to underscores, which\nis required for Python packaging\n\n3. Updates the package metadata in `pyproject.toml` to match our project\nsettings\n\n4. Builds both wheel and source distribution packages using `python -m\nbuild`\n\n5. Preserves the built packages and configuration as artifacts for\nsubsequent stages\n\n\n## Signing: The digital notarization\n\n\nIf attestation is the package's biography, signing is its cryptographic seal\nof authenticity. This is where we transform our package from a mere\ncollection of files into a verified, tamper-evident artifact.\n\n\nThe signing stage uses Cosign to apply a digital signature as an unbreakable\nseal. 
This isn't just a stamp — it's a complex cryptographic handshake that\nproves the package's integrity and origin.\n\n\n```yaml\n\nsign:\n  extends: .python+cosign-job\n  stage: sign\n  id_tokens:\n    SIGSTORE_ID_TOKEN:\n      aud: sigstore\n  script:\n    - |\n      for file in dist/*.whl dist/*.tar.gz; do\n        if [ -f \"$file\" ]; then\n          filename=$(basename \"$file\")\n          cosign sign-blob --yes \\\n            --fulcio-url=${FULCIO_URL} \\\n            --rekor-url=${REKOR_URL} \\\n            --oidc-issuer $CI_SERVER_URL \\\n            --identity-token $SIGSTORE_ID_TOKEN \\\n            --output-signature \"dist/${filename}.sig\" \\\n            --output-certificate \"dist/${filename}.crt\" \\\n            \"$file\"\n        fi\n      done\n  artifacts:\n    paths:\n      - dist/\n```\n\n\nThis signing stage performs several crucial operations:\n\n\n1. Obtains an OIDC token from GitLab for authentication with Sigstore\nservices\n\n2. Processes each built package (both wheel and source distribution)\n\n3. Uses Cosign to create a cryptographic signature (`.sig`) for each package\n\n4. Generates a certificate (`.crt`) that proves the signature's authenticity\n\n5. Stores both signatures and certificates alongside the packages as\nartifacts\n\n\n## Verification: The security checkpoint\n\n\nVerification is our final quality control gate. It's not just a check — it's\na security interrogation where every aspect of the package is scrutinized.\n\n\n```yaml\n\nverify:\n  extends: .python+cosign-job\n  stage: verify\n  script:\n    - |\n      failed=0\n      for file in dist/*.whl dist/*.tar.gz; do\n        if [ -f \"$file\" ]; then\n          filename=$(basename \"$file\")\n          if ! 
cosign verify-blob \\\n            --signature \"dist/${filename}.sig\" \\\n            --certificate \"dist/${filename}.crt\" \\\n            --certificate-identity \"${CERTIFICATE_IDENTITY}\" \\\n            --certificate-oidc-issuer \"${CERTIFICATE_OIDC_ISSUER}\" \\\n            \"$file\"; then\n            failed=1\n          fi\n        fi\n      done\n      if [ $failed -eq 1 ]; then\n        exit 1\n      fi\n```\n\n\nThe verification stage implements several security checks:\n\n\n1. Examines each package file in the `dist` directory\n\n2. Uses Cosign to verify the signature matches the package content\n\n3. Confirms the certificate's identity matches our expected GitLab pipeline\nidentity\n\n4. Validates our trusted OIDC provider issued the certificate\n\n5. Fails the entire pipeline if any verification check fails, ensuring only\nverified packages proceed\n\n\n## Publishing: The controlled release\n\n\nPublishing is where we make our verified packages available through GitLab's\npackage registry. It's a carefully choreographed release that ensures only\nverified, authenticated packages reach their destination.\n\n\n```yaml\n\npublish:\n  extends: .python-job\n  stage: publish\n  script:\n    - |\n      cat \u003C\u003C EOF > ~/.pypirc\n      [distutils]\n      index-servers = gitlab\n      [gitlab]\n      repository = ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi\n      username = gitlab-ci-token\n      password = ${CI_JOB_TOKEN}\n      EOF\n      TWINE_PASSWORD=${CI_JOB_TOKEN} TWINE_USERNAME=gitlab-ci-token \\\n        twine upload --repository-url ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/pypi \\\n        dist/*.whl dist/*.tar.gz\n```\n\n\nThe publishing stage handles several important tasks:\n\n\n1. Creates a `.pypirc` configuration file with GitLab package registry\ncredentials\n\n2. Uses the GitLab CI job token for secure authentication\n\n3. Uploads both wheel and source distribution packages to the GitLab PyPI\nregistry\n\n4. 
Makes the packages available for installation via pip\n\n\n## Publishing signatures: Making verification possible\n\n\nAfter publishing the packages, we must make their signatures and\ncertificates available for verification. We store these in GitLab's generic\npackage registry, making them easily accessible to users who want to verify\npackage authenticity.\n\n\n```yaml\n\npublish_signatures:\n  extends: .python+cosign-job\n  stage: publish_signatures\n  script:\n    - |\n      for file in dist/*.whl dist/*.tar.gz; do\n        if [ -f \"$file\" ]; then\n          filename=$(basename \"$file\")\n          curl --header \"JOB-TOKEN: ${CI_JOB_TOKEN}\" \\\n               --fail \\\n               --upload-file \"dist/${filename}.sig\" \\\n               \"${GENERIC_PACKAGE_BASE_URL}/${filename}.sig\"\n\n          curl --header \"JOB-TOKEN: ${CI_JOB_TOKEN}\" \\\n               --fail \\\n               --upload-file \"dist/${filename}.crt\" \\\n               \"${GENERIC_PACKAGE_BASE_URL}/${filename}.crt\"\n        fi\n      done\n```\n\n\nThe signature publishing stage performs these key operations:\n\n\n1. Processes each built package to find its corresponding signature files\n\n2. Uses the GitLab API to upload the signature (`.sig`) file to the generic\npackage registry\n\n3. Uploads the corresponding certificate (`.crt`) file\n\n4. Makes these verification artifacts available for downstream package\nconsumers\n\n5. Uses the same version and package name to maintain the connection between\npackages and signatures\n\n\n## Consumer verification: Testing the user experience\n\n\nThe final stage simulates how end users will verify your package's\nauthenticity. 
This stage acts as a final check and a practical example of\nthe verification process.\n\n\n```yaml\n\nconsumer_verification:\n  extends: .python+cosign-job\n  stage: consumer_verification\n  script:\n    - |\n      git init\n      git config --global init.defaultBranch main\n      mkdir -p pkg signatures\n\n      pip download --index-url \"https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.com/api/v4/projects/${CI_PROJECT_ID}/packages/pypi/simple\" \\\n          \"${NORMALIZED_NAME}==${PACKAGE_VERSION}\" --no-deps -d ./pkg\n\n      pip download --no-binary :all: \\\n          --index-url \"https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.com/api/v4/projects/${CI_PROJECT_ID}/packages/pypi/simple\" \\\n          \"${NORMALIZED_NAME}==${PACKAGE_VERSION}\" --no-deps -d ./pkg\n\n      failed=0\n      for file in pkg/*.whl pkg/*.tar.gz; do\n        if [ -f \"$file\" ]; then\n          filename=$(basename \"$file\")\n          sig_url=\"${GENERIC_PACKAGE_BASE_URL}/${filename}.sig\"\n          cert_url=\"${GENERIC_PACKAGE_BASE_URL}/${filename}.crt\"\n\n          curl --fail --silent --show-error \\\n               --header \"JOB-TOKEN: ${CI_JOB_TOKEN}\" \\\n               --output \"signatures/${filename}.sig\" \\\n               \"$sig_url\"\n\n          curl --fail --silent --show-error \\\n               --header \"JOB-TOKEN: ${CI_JOB_TOKEN}\" \\\n               --output \"signatures/${filename}.crt\" \\\n               \"$cert_url\"\n\n          if ! cosign verify-blob \\\n            --signature \"signatures/${filename}.sig\" \\\n            --certificate \"signatures/${filename}.crt\" \\\n            --certificate-identity \"${CERTIFICATE_IDENTITY}\" \\\n            --certificate-oidc-issuer \"${CERTIFICATE_OIDC_ISSUER}\" \\\n            \"$file\"; then\n            failed=1\n          fi\n        fi\n      done\n\n      if [ $failed -eq 1 ]; then\n        exit 1\n      fi\n```\n\n\nThis consumer verification stage simulates the end-user experience by:\n\n\n1. 
Creating a clean environment to test package installation\n\n2. Downloading the published packages from the GitLab PyPI registry\n\n3. Retrieving the corresponding signatures and certificates from the generic\npackage registry\n\n4. Performing the same verification steps that end users would perform\n\n5. Ensuring the entire process works from a consumer's perspective\n\n6. Failing the pipeline if any verification step fails, providing an early\nwarning of any issues\n\n\n## Summary\n\n\nThis comprehensive pipeline provides a secure and reliable way to build,\nsign, and publish Python packages to GitLab's package registry. By following\nthese practices and implementing the suggested security measures, you can\nensure your packages are appropriately verified and safely distributed to\nyour users.\n\n\nThe pipeline combines modern security practices with efficient automation to\ncreate a robust software supply chain. Using Sigstore's Cosign for signing\nand attestation, along with GitLab's built-in security features, you can\nprovide users with trustworthy cryptographically verified packages.\n\n\n> #### Get started on your security journey today with a [free trial\nof GitLab\nUltimate](https://gitlab.com/-/trials/new?glm_content=default-saas-trial&glm_source=about.gitlab.com).\n\n\n## Learn more\n\n- [Documentation: Use Sigstore for keyless signing and\nverification](https://docs.gitlab.com/ee/ci/yaml/signing_examples.html)\n\n- [Streamline security with keyless signing and verification in\nGitLab](https://about.gitlab.com/blog/keyless-signing-with-cosign/)\n\n- [Annotate container images with build provenance using Cosign in 
GitLab\nCI/CD](https://about.gitlab.com/blog/annotate-container-images-with-build-provenance-using-cosign-in-gitlab-ci-cd/)\n",[693,233,284,695,1542,109,9,719,930],{"slug":3507,"featured":91,"template":698},"secure-and-publish-python-packages-a-guide-to-ci-integration","content:en-us:blog:secure-and-publish-python-packages-a-guide-to-ci-integration.yml","Secure And Publish Python Packages A Guide To Ci Integration","en-us/blog/secure-and-publish-python-packages-a-guide-to-ci-integration.yml","en-us/blog/secure-and-publish-python-packages-a-guide-to-ci-integration",{"_path":3513,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3514,"content":3520,"config":3525,"_id":3527,"_type":14,"title":3528,"_source":16,"_file":3529,"_stem":3530,"_extension":19},"/en-us/blog/secure-and-safe-login-and-commits-with-gitlab-yubico",{"title":3515,"description":3516,"ogTitle":3515,"ogDescription":3516,"noIndex":6,"ogImage":3517,"ogUrl":3518,"ogSiteName":685,"ogType":686,"canonicalUrls":3518,"schema":3519},"Secure and safe login and commits with GitLab + Yubico","Learn how GitLab and Yubico have partnered to strengthen software development security through robust authentication measures.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663259/Blog/Hero%20Images/REFERENCE_-_display_preview_for_blog_images__3_.png","https://about.gitlab.com/blog/secure-and-safe-login-and-commits-with-gitlab-yubico","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Secure and safe login and commits with GitLab + Yubico\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Fernando Diaz\"}],\n        \"datePublished\": \"2025-04-02\"\n      }",{"title":3515,"description":3516,"authors":3521,"heroImage":3517,"date":3522,"body":3523,"category":693,"tags":3524},[1599],"2025-04-02","We live in a time where data breaches and phishing attacks make daily\nheadlines. 
These breaches can cause harm to an organization, such as\nregulatory fines, business downtime, or even worse, reputational damage. In\nterms of authentication, passwords have been the backbone of online security\nfor decades, however, they're increasingly proving inadequate against\nsophisticated cyber threats.\n\n\nGitLab and [Yubico](https://www.yubico.com/) have partnered to strengthen\nsoftware development security through robust authentication measures. Yubico\nis the inventor of the YubiKey, a hardware security key that delivers\nphishing-resistant multi-factor authentication (MFA). By implementing FIDO\nUniversal 2nd Factor (U2F) and YubiKey hardware protection, GitLab offers\ndevelopers a powerful defense against phishing attacks and other cyber\nthreats, ensuring their code and projects remain secure. This collaboration\nexpands enterprise-grade authentication in the GitLab platform, allowing\nprogrammers to focus on creating software while maintaining confidence in\ntheir account's integrity.\n\n\nThis article explains how to configure GitLab to use YubiKeys to protect\ndevelopers from online threats. You’ll also learn how to further prevent\ntampering with GitLab verified commits.\n\n\n## How YubiKeys work\n\n\nAt their core, YubiKeys function as cryptographic hardware tokens that\ngenerate and store private keys in a secure element. These keys implement\nFIDO2/WebAuthn authentication protocols, which can be used as an additional\nfactor to login to GitLab.\n\n\nHere's how it works when logging in:\n\n\n1. You enter your username and password.  \n\n2. GitLab sends a cryptographic challenge to your browser.  \n\n3. Your browser requests the YubiKey to sign this challenge.  \n\n4. You physically touch the YubiKey to approve.\n\n5. The YubiKey creates a unique cryptographic signature for that specific\nservice and challenge.  \n\n6. 
GitLab verifies the signature using your public key stored during setup.\n\n\nMost major security breaches involve compromised passwords. Adding a YubiKey\nsecures your account from a remote breach, even if your password is stolen,\nso you can rest assured that your GitLab account is secure. Additional key\nsecurity benefits of using YubiKey for authentication with GitLab include:\n\n\n* **Phishing protection:** Fake sites won't have the correct cryptographic\nkeys to verify the response. \n\n* **No secrets to steal:** The private key never leaves the YubiKey.  \n\n* **Physical security:** Physical presence is required to use it (you must\ntouch the YubiKey).\n\n\n## Setting up YubiKey multifactor authentication in GitLab\n\n\nNow let’s go over how to set up a Yubikey for multifactor authentication in\nGitLab. Make sure you're using a [supported browser and operating\nsystem](https://support.yubico.com/hc/en-us/articles/360016615020-Operating-system-and-web-browser-support-for-FIDO2-and-U2F)\nas they have better WebAuthn support for hardware security keys.\n\n\n1. First, log in to your GitLab account and go to your user settings (click\nyour avatar in the top left corner and select **Preferences**). \n\n2. In the left sidebar, click on **Account** and navigate to the\n**Two-factor Authentication** section.\n\n3. If you haven't already enabled 2FA, you'll need to do that first.\n\n    a. Click **Enable two-factor authentication**.\n\n    b. Scan the QR code with your authenticator app.\n\n    c. Enter the code from your authenticator app.\n\n    d. Enter your GitLab password. If you ever need to access your GitLab account without using Google authentication, you may need to:\n    * Use the **Forgot password** option on the GitLab login page to set up a separate GitLab password.\n    * Contact your GitLab administrator to help you set up alternative login methods.\n\n   e. Save your recovery codes in a safe place.\n\n4. 
Once 2FA is enabled, go back to the previous screen by pressing **Manage\ntwo-factor authentication** and scroll down to the **Register hardware\ntoken** section.  \n\n5. Press the **Set up new device** button.  \n    a. A popup from your browser should appear. **Note:** This image may look different depending on your browser. You may also get popups from password managers feel free to ignore them. \n\n![Browser (Brave) Auth\nRequest](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674606/Blog/Content%20Images/browser_auth_request.png)\n\n\n&nbsp; &nbsp; b. Select **Use a phone, tablet, or security key**.\n\n\n6. A new popup will appear.\n\n\n![browser security key\nrequest](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674607/Blog/Content%20Images/browser_security_key_request.png)\n\n\n&nbsp; &nbsp; a. Insert your YubiKey into your computer's USB port.\n\n\n&nbsp; &nbsp; b. Touch the metal contact/button on your YubiKey when\nprompted. The field will automatically fill with a one-time code.\n\n\n7. Enter your GitLab Password and provide a name for your Hardware Key.  \n\n8. Click **Register** to add the YubiKey to your account.\n\n\nCongratulations, your YubiKey is now registered and can be used as a second\nfactor when logging into GitLab! You can register multiple YubiKeys to your\naccount for backup purposes. **Note:** The process may vary slightly among\nbrowsers.\n\n\n![yubikey\nregistered](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674607/Blog/Content%20Images/yubikey_registered.png)\n\n\n\u003Ccenter>\u003Ci>YubiKey registered successfully\u003C/i>\u003C/center>\n\n\n## Signing in with a YubiKey\n\n\nNow that we have our YubiKey configured, we can log in as follows:\n\n\n1. Go to GitLab.com.\n\n\n![GitLab\nlogin](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674607/Blog/Content%20Images/gitlab_login.png)\n\n\n2. Provide your username and password and then press the **Sign in** button.\n\n3. 
You will be sent to the following screen.\n\n\n![GitLab 2fa\nlogin](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674606/Blog/Content%20Images/2fa_login.png)\n\n\n&nbsp; &nbsp; a. A popup, like the one below, should come up. **Note:** This\nimage may look different depending on your browser. You may also get popups\nfrom password managers; feel free to ignore them.\n\n\n![Browser security key\nrequest](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674607/Blog/Content%20Images/browser_security_key_request.png)\n\n\n&nbsp; &nbsp; b. Insert your YubiKey into your computer's USB port.\n\n\n&nbsp; &nbsp; c. Touch the metal contact/button on your YubiKey when\nprompted. The field will automatically fill with a one-time code.\n\n\nNow, you should be logged in and taken to your GitLab page. **Note:** The\nprocess may vary slightly among browsers.\n\n\n## What happens if I lose my YubiKey?\n\n\nYubico recommends that you use and keep a backup YubiKey. When considering\nyour home, car, or office, you wouldn’t think twice about having a backup\nkey to keep in a safe place. Your digital self should get the same level of\nconsideration. A backup YubiKey kept in a safe place provides a quick and\nsafe backup if your primary YubiKey is lost. Keeping a backup will also\neasily enable you to deactivate the lost YubiKey and add a new primary or\nsecondary YubiKey.\n\n\nIf you do not have an additional YubiKey added, it is recommended to have\nanother form of 2FA added to your accounts. 
In either case, you should be\nable to get access to your account and remove the lost key from the account.\nPlease note that if a spare key or another authentication method hasn’t been\nadded, you will need to contact the service/website for help with recovering\nyour account.\n\n\n## GitLab verified commits\n\n\nTo further prevent tampering, you can also configure verified commits.\nVerified commits in GitLab use GPG (GNU Privacy Guard) signatures to prove\nthat a commit actually came from you. This adds another layer of security on\ntop of authentication by ensuring that not only is your account secure, but\nevery code change can be cryptographically verified as coming from you.\n\n\nYour YubiKey can store GPG keys:\n\n\n* The private key is stored securely on the YubiKey.  \n\n* The public key is shared with GitLab.\n\n* The key pair is used to sign your commits.\n\n\nOnce the GPG keys have been set up:\n\n\n* When you make a commit, Git uses your private key to create a signature.  \n\n* The GPG key is accessed from the attached YubiKey.  \n\n* The signature is stored with the commit metadata.  \n\n* GitLab verifies the signature using your public key.\n\n\n## Setting up verified commits\n\n\nLet’s go over how to configure verified commits. In this example, the GPG\nkey will live inside your YubiKey, providing an extra layer of security.\n\n\n1. Install required software.\n\n\n```bash\n\n# On macOS\n\nbrew install --cask yubico-yubikey-manager\n\nbrew install gnupg gpg yubikey-manager\n\n\n# On Ubuntu/Debian\n\nsudo apt install gnupg gpg yubikey-personalization\n\n\n# On Windows\n\n# Download and install Gpg4win from https://gpg4win.org\n\n```\n\n\n2. Check YubiKey GPG status.\n\n\n```bash\n\ngpg --card-status\n\n```\n\n3. 
Generate GPG keys directly on YubiKey (more secure).\n\n\n```bash\n\n# Start GPG edit mode\n\ngpg --card-edit\n\n\n# Enter admin mode\n\nadmin\n\n\n# Generate key directly on card\n\n# PIN = '123456' | Admin PIN = '12345678'\n\ngenerate\n\n\n# Follow prompts\n\n# See documentation for more info \n\n#\nhttps://support.yubico.com/hc/en-us/articles/360013790259-Using-Your-YubiKey-with-OpenPGP\n\n```\n\n\n4. Export your public key.\n\n\n```bash\n\n# Get your key ID\n\ngpg --list-secret-keys --keyid-format LONG\n\n\n# Export the public key\n\ngpg --armor --export YOUR_KEY_ID\n\n```\n\n\n5. Add the public key to GitLab.\n\n    a. Click on your GitLab Avatar and select **Preferences**.\n\n    b. On the side tab select **GPG Keys**.\n\n    c. Click **Add new key**.\n\n    d. Paste your public key.\n\n    e. Click **Add key**.\n\n6. Configure Git.\n\n\n```bash\n\n# Set signing key\n\ngit config --global user.signingkey YOUR_KEY_ID\n\n\n# Enable automatic signing\n\ngit config --global commit.gpgsign true\n\n\n# Tell GPG which key to use\n\necho \"default-key YOUR_KEY_ID\" >> ~/.gnupg/gpg.conf\n\n```\n\n\n7. 
Now let’s test the configuration by creating a test commit in a project:\n\n\n```bash\n\n# Make a change in the project\n\n# Add changes\n\ngit add .\n\n\n# Make a test commit\n\ngit commit -S -m \"Test signed commit\"\n\n\n# Verify signature\n\ngit verify-commit HEAD\n\n\n# Push the change\n\ngit push\n\n```\n\n\nThe `git verify-commit HEAD` command should show the GPG key used:\n\n\n```bash\n\ngpg: Signature made Wed Feb 26 11:45:00 2025 CST\n\ngpg:                using RSA key YOUR_KEY_ID\n\ngpg: Good signature from “NAME (DESCRIPTION) \u003CEMAIL>\" [ultimate]\n\n```\n\n\nThen, when viewing the commit in GitLab, you should now see that the commit\nis verified as follows:\n\n\n![Commit is\nverified](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674607/Blog/Content%20Images/verified.png)\n\n\n\u003Ccenter>\u003Ci>Commit verified with GPG key\u003C/i>\u003C/center>\n\n\u003Cbr>\u003C/br>\n\n\nYou can also use the [commits\nAPI](https://docs.gitlab.com/api/commits/#get-signature-of-a-commit) to\ncheck a commit’s signature allowing you to further operationalize the\nverification workflow.\n\n\n## Learn more\n\n\nTo learn more about GitLab, Yubico, and the solutions each provides, check\nout these resources:\n\n\n* [Why GitLab](https://about.gitlab.com/why-gitlab/)  \n\n* [Why Yubico](https://www.yubico.com/why-yubico/)  \n\n* [GitLab Security and Compliance\nSolutions](https://about.gitlab.com/solutions/security-compliance/)  \n\n* [GitLab listing in the \"Works with YubiKey\"\ncatalog](https://www.yubico.com/works-with-yubikey/catalog/gitlab/)  \n\n* [Verified Commits - GitLab\ndocumentation](https://docs.gitlab.com/ee/user/project/repository/signed_commits/)  \n\n* [Push Rules in\nGitLab](https://docs.gitlab.com/user/project/repository/push_rules/)  \n\n* [Sign Commit with GPG 
Keys\ndocumentation](https://docs.gitlab.com/user/project/repository/signed_commits/gpg/)\n",[233,693,719,9,717,695],{"slug":3526,"featured":91,"template":698},"secure-and-safe-login-and-commits-with-gitlab-yubico","content:en-us:blog:secure-and-safe-login-and-commits-with-gitlab-yubico.yml","Secure And Safe Login And Commits With Gitlab Yubico","en-us/blog/secure-and-safe-login-and-commits-with-gitlab-yubico.yml","en-us/blog/secure-and-safe-login-and-commits-with-gitlab-yubico",{"_path":3532,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3533,"content":3539,"config":3544,"_id":3546,"_type":14,"title":3547,"_source":16,"_file":3548,"_stem":3549,"_extension":19},"/en-us/blog/secure-by-design-principles-meet-devsecops-innovation-in-gitlab-17",{"title":3534,"description":3535,"ogTitle":3534,"ogDescription":3535,"noIndex":6,"ogImage":3536,"ogUrl":3537,"ogSiteName":685,"ogType":686,"canonicalUrls":3537,"schema":3538},"Secure by Design principles meet DevSecOps innovation in GitLab 17","GitLab reinforced a commitment to Secure by Design principles across key aspects of the software development lifecycle in latest release, further protecting the software supply chain.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749676004/Blog/Hero%20Images/blog-image-template-1800x945__6_.png","https://about.gitlab.com/blog/secure-by-design-principles-meet-devsecops-innovation-in-gitlab-17","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Secure by Design principles meet DevSecOps innovation in GitLab 17\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Elisabeth Burrows\"}],\n        \"datePublished\": \"2024-06-05\",\n      }",{"title":3534,"description":3535,"authors":3540,"heroImage":3536,"date":3541,"body":3542,"category":693,"tags":3543},[844],"2024-06-05","Secure by Design just [turned one](https://about.gitlab.com/blog/happy-birthday-secure-by-design/)! 
Introduced by the Cybersecurity and Infrastructure Security Agency (CISA) a little over a year ago, Secure by Design principles serve as a directive for technology providers to embed security at the heart of their products from the outset of development. This approach is the clearest answer to address cyber attacks, dramatically reducing the number of exploitable flaws before they are introduced to the market for broad use or consumption. Cyberattacks can be more prevalent when businesses and vendors “bolt on” security as an afterthought, amplifying the need for Secure by Design solutions. With the launch of [GitLab 17](https://about.gitlab.com/releases/2024/05/16/gitlab-17-0-released/), we have strengthened our commitment to Secure by Design principles across five key aspects of the software development lifecycle. Although no supply chain is 100% immune to cyber threats, it is imperative to embrace a proactive security strategy to protect against persistent threats from malicious actors.\n\n> Discover the future of AI-driven software development with our GitLab 17 virtual launch event. [Watch today!](https://about.gitlab.com/seventeen/)\n\n## How GitLab 17 aligns with Secure by Design principles\n\n### 1. Enhance secure coding practices\n\nFostering secure software development practices is a key element to CISA’s Secure by Design framework. CISA recommends alignment to the [Secure Software Development Framework (SSDF)](https://csrc.nist.gov/pubs/sp/800/218/final) from the National Institute of Standards and Technology (NIST). GitLab’s robust [application security scanners](https://about.gitlab.com/blog/getting-started-with-gitlab-application-security/) demonstrate strong default alignment to this framework. In GitLab 17, we added streamlined Static Application Security Testing ([SAST](https://docs.gitlab.com/ee/user/application_security/sast/)) analyzer coverage for more languages, offering a simpler, more customizable scan experience. 
The recent [acquisition of Oxeye](https://about.gitlab.com/press/releases/2024-03-20-gitlab-acquires-oxeye-to-advance-application-security-and-governance-capabilities/) enhances SAST accuracy, reducing false positives and offering actionable insights to tackle application-layer risks proactively. Other related improvements in GitLab 17 include [API Security Testing analyzer](https://docs.gitlab.com/ee/user/application_security/api_security_testing/) updates, [advanced vulnerability tracking for Secret Detection](https://docs.gitlab.com/ee/user/application_security/secret_detection/pipeline/#advanced-vulnerability-tracking), and [Dependency Scanning support for Android](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/index.html#enabling-dependency-scanning-by-using-cicd-components). GitLab also continues to improve its [dynamic software bill of materials (SBOM) management](https://about.gitlab.com/blog/the-ultimate-guide-to-sboms/) capabilities.\n\n### 2. Manage vulnerabilities at scale\n\nMalicious actors capitalize on cost-effective tactics, leveraging basic vulnerabilities to cause widespread disruption. GitLab’s [Vulnerability Report](https://docs.gitlab.com/ee/user/application_security/vulnerability_report/) enables you to quantify risk across your portfolio in a single view, identifying key vulnerability details throughout your supply chain. \nImprovements to [Vulnerability Report filtering](https://docs.gitlab.com/ee/user/application_security/vulnerability_report/#vulnerability-report-filters) in GitLab 17 increased usability of the report at scale. Actionable security findings are vital for developers to address critical weaknesses. 
GitLab provides [vulnerability insights](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/), [security training for vulnerabilities](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/#view-security-training-for-a-vulnerability), and [vulnerability explanation](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/#vulnerability-explanation). \n\n### 3. Transition to memory-safe languages with AI\n\nIn a recent [virtual panel with the Atlantic Council](https://www.atlanticcouncil.org/event/the-secure-by-design-imperative/), CISA Senior Technical Advisor Jack Cable stated, “Technology manufacturers must focus on eliminating entire classes of vulnerability, rather than playing “whack-a-mole” with their defects.” In CISA’s [Secure by Design whitepaper](https://www.cisa.gov/sites/default/files/2023-10/SecureByDesign_1025_508c.pdf), they recommend that manufacturers take steps to eliminate one of the largest classes of vulnerabilities by migrating existing products and building new products using memory-safe languages. A memory-safe language is a language where memory allocation and garbage collection are abstracted away from the developer and handled by the programming language itself. Such languages include Python, Java, and Go, to name a few. Vulnerabilities related to memory safety are the most common and dangerous. Technology manufacturers can effectively address vulnerabilities by integrating memory-safe language development practices. [GitLab Duo](https://about.gitlab.com/gitlab-duo/), our suite of AI-powered features, provides AI-accelerated assistance for memory-safe code conversions:\n\n- Accelerate application development: [GitLab Duo Code Explanation](https://docs.gitlab.com/ee/user/ai_features.html#code-explanation-in-the-ide) succinctly articulates code functionality in everyday language, helping developers understand code quickly and add value faster. 
[GitLab Duo Code Suggestions](https://docs.gitlab.com/ee/user/ai_features.html#code-suggestions) assists developers in writing secure code efficiently and speeding up cycle times by handling repetitive coding tasks effectively.\n- Convert to memory-safe code: [GitLab Duo Chat](https://docs.gitlab.com/ee/user/gitlab_duo_chat_examples.html#refactor-code-in-the-ide) can help expedite memory-safe language refactoring by suggesting changes based on coding patterns, libraries, functions, algorithms, programming languages, performance, or vulnerabilities.\n- Secure AI-generated code: [GitLab Duo Vulnerability Explanation](https://docs.gitlab.com/ee/user/ai_features.html#vulnerability-explanation) provides clear insights into identified security issues, while [GitLab Duo Vulnerability Resolution](https://docs.gitlab.com/ee/user/ai_features.html#vulnerability-resolution) can automatically generate a merge request to mitigate a vulnerability. \n\nIn GitLab 17, we also have added the means to validate and track AI impact to your development progress through [AI Impact Analytics](https://about.gitlab.com/blog/developing-gitlab-duo-ai-impact-analytics-dashboard-measures-the-roi-of-ai/).\n\n### 4. Align to the principle of least privilege\n\nAligning product deployment guides with zero trust architecture, such as the [CISA Zero Trust Maturity Model](https://www.cisa.gov/sites/default/files/2023-04/zero_trust_maturity_model_v2_508.pdf), is a key recommendation in Secure by Design. In zero trust security, the [principle of least privilege (PoLP)](https://about.gitlab.com/blog/the-ultimate-guide-to-least-privilege-access-with-gitlab/0) is a key element within the overarching framework. The PoLP is a concept in which a user's access rights should be limited to the bare minimum needed for them to complete the tasks required within their respective roles. 
By keeping a tight rein on user access rights, granting only the necessary permissions for their tasks, organizations uphold the core tenet of [zero trust](https://about.gitlab.com/blog/why-devops-and-zero-trust-go-together/). Maintaining a clear separation of duties is the first step in upholding this principle. GitLab's [policy management](https://docs.gitlab.com/ee/administration/compliance.html#policy-management) features empower security and compliance teams to oversee operations while defining responsibilities among security, compliance, legal, and engineering units. By implementing GitLab's [security policies](https://docs.gitlab.com/ee/user/application_security/policies/), development teams gain process flexibility, ensuring the delivery of stable, reliable, and high-quality code. With the ability to establish rules and policies tailored to the organization's unique needs, teams can utilize granular user roles, permissions, and customizable compliance settings for specific projects, groups, and individuals. GitLab 17 introduces enhanced governance controls via [permissions customizations](https://about.gitlab.com/releases/2024/05/16/gitlab-17-0-released/#new-permissions-for-custom-roles), reducing unnecessary privilege escalation.\n\n## How we are committed to Secure by Design principles\n\nOne of the principles of Secure by Design business practices is the notion of leading from the top. It's imperative for organizations to secure executive buy-in that places Secure by Design at the forefront of business priorities, nurturing an environment where security takes precedence. GitLab recently joined the ranks of technology leaders who signed CISA’s [Secure by Design Pledge](https://www.cisa.gov/securebydesign/pledge/statements-of-support#JoshLemosCISOGitLab), showcasing our commitment to uphold CISA’s Secure by Design goals. 
This public commitment, paired with strategic investments, a culture of transparency, and product designs that prioritize security, fosters a robust security ethos that directly benefits end users. With the launch of GitLab 17, GitLab propels security and compliance solutions forward, harnessing AI advancements to empower clients to embrace a Secure by Design methodology with confidence.\n\n> Get familiar with GitLab's secure-by-design platform today with a [free trial of GitLab Ultimate](https://gitlab.com/-/trial_registrations/new?glm_source=about.gitlab.com/blog&glm_content=default-saas-trial). \n",[186,693,9,951],{"slug":3545,"featured":91,"template":698},"secure-by-design-principles-meet-devsecops-innovation-in-gitlab-17","content:en-us:blog:secure-by-design-principles-meet-devsecops-innovation-in-gitlab-17.yml","Secure By Design Principles Meet Devsecops Innovation In Gitlab 17","en-us/blog/secure-by-design-principles-meet-devsecops-innovation-in-gitlab-17.yml","en-us/blog/secure-by-design-principles-meet-devsecops-innovation-in-gitlab-17",{"_path":3551,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3552,"content":3558,"config":3563,"_id":3565,"_type":14,"title":3566,"_source":16,"_file":3567,"_stem":3568,"_extension":19},"/en-us/blog/secure-compliant-and-ai-powered-get-to-know-3-new-gitlab-features",{"title":3553,"description":3554,"ogTitle":3553,"ogDescription":3554,"noIndex":6,"ogImage":3555,"ogUrl":3556,"ogSiteName":685,"ogType":686,"canonicalUrls":3556,"schema":3557},"Secure, compliant, and AI-powered: Get to know 3 new GitLab features","Enhance security, leverage new AI capabilities, and protect sensitive data with our latest platform improvements.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749664458/Blog/Hero%20Images/Gartner_AI_Code_Assistants_Blog_Post_Cover_Image_1800x945.png","https://about.gitlab.com/blog/secure-compliant-and-ai-powered-get-to-know-3-new-gitlab-features","\n                        {\n        \"@context\": 
\"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Secure, compliant, and AI-powered: Get to know 3 new GitLab features\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Jessica Hurwitz\"}],\n        \"datePublished\": \"2025-01-27\",\n      }",{"title":3553,"description":3554,"authors":3559,"heroImage":3555,"date":3560,"body":3561,"category":717,"tags":3562},[843],"2025-01-27","AI capabilities are rapidly reshaping how teams build, secure, and deploy applications. As part of our ongoing commitment to helping you navigate the evolving marketplace, GitLab has introduced more than 440 improvements in the past three releases. We're excited to spotlight three standout features making an immediate impact on how teams approach AI-powered DevSecOps. In addition, we announced we are partnering with AWS to launch [GitLab Duo with Amazon Q](https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-devsecops-meets-agentic-ai/), combining our strengths to transform software development. We're creating an experience, together, that makes AI-powered development feel seamless and upholds the security, compliance, and reliability that enterprises require.\n\n> Learn how GitLab can [deliver 483% ROI over the next three years](https://about.gitlab.com/blog/gitlab-ultimates-total-economic-impact-483-roi-over-3-years/), according to Forrester Consulting.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://player.vimeo.com/video/1056012314?badge=0\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write; encrypted-media\" title=\"GitLab 17.6-17.8 Quarterly Release Overview\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## 1. 
Vulnerability Resolution: Streamline security remediation\n\nGitLab’s 2024 [Global DevSecOps Report](https://about.gitlab.com/developer-survey/) found that 66% of companies are releasing software twice as fast — or faster — than in previous years, as businesses strive to deliver more value to their customers than competitors. However, speed introduces risk. With security teams [outnumbered by dev teams 80:1](https://www.opentext.com/assets/documents/en-US/pdf/developer-driven-appsec-security-at-the-speed-of-devops-pp-en.pdf), threat actors are able to exploit applications at a record pace. Last year alone, [80% of the top data breaches](https://www.crowdstrike.com/2024-state-of-application-security-report/) stemmed from attacks at the application layer.\n\n[GitLab Duo Vulnerability Resolution](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/#vulnerability-resolution) addresses this challenge head-on. When vulnerabilities are detected in your code, you can now access detailed information right from the vulnerability report and invoke GitLab Duo to automatically create a merge request that updates your code and mitigates the risk. While developers must review these auto-generated merge requests before merging to verify the changes, this automation significantly streamlines the remediation process. Vulnerability Resolution pairs with [Vulnerability Explanation](https://about.gitlab.com/the-source/ai/understand-and-resolve-vulnerabilities-with-ai-powered-gitlab-duo/), which also recently became generally available. Vulnerability Explanation gives developers a detailed description of the vulnerability infecting their code, real-world examples of how attackers can exploit the vulnerable code, and practical suggestions for remediation.\n\nBy expediting the vulnerability remediation process, your teams can focus on delivering software faster while maintaining strong security practices. 
With less time spent researching and remediating vulnerabilities, developers can concentrate on building features that drive business value.\n\n_GitLab Duo Vulnerability Resolution is available as a [GitLab Duo Enterprise add-on](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/?type=free-trial&toggle=gitlab-duo-pro)._\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"\nhttps://www.youtube.com/embed/VJmsw_C125E?si=W7n1ESS63xkPyH4H\" frameborder=\"0\" title=\"GitLab Vulnerability Resolution\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## 2. Model Registry: Breaking down silos between Data Science and Development teams\n\nFor organizations building AI-powered applications, bridging the gap between data science and software development teams has been a persistent challenge. Data scientists and developers often work in disconnected tools and workflows, leading to friction, delays, and potential errors when deploying models to production.\n\n[GitLab Model Registry](https://docs.gitlab.com/ee/user/project/ml/model_registry/) directly addresses this challenge by providing a centralized hub where data science and development teams can collaborate seamlessly within their existing GitLab workflow. Built with [MLflow](https://docs.gitlab.com/ee/user/project/ml/experiment_tracking/mlflow_client.html#model-registry) native integration, the registry allows data scientists to continue using their preferred tools while making models and artifacts instantly accessible to the broader development team.\nThis unified approach transforms team collaboration. 
Data scientists can version models, store artifacts, and document model behavior through comprehensive model cards, while developers can easily integrate these models into their applications using GitLab CI/CD pipelines for automated testing and deployment.\n\nAdditionally, the Model Registry's semantic versioning and GitLab API integration enables teams to implement robust governance and automate production deployments, creating a streamlined environment where data scientists and developers can work together effectively to deliver AI-powered innovation.\n\n_Model Registry is available across all tiers for SaaS and self-managed customers. See the [release blog for 17.6](https://about.gitlab.com/releases/2024/11/21/gitlab-17-6-released/#model-registry-now-generally-available) and [documentation](https://docs.gitlab.com/ee/user/project/ml/model_registry/) for more._\n\n## 3. Secret Push Protection: Shift security left with proactive secret detection\n\nTeams often face a critical security challenge: Developers may hardcode sensitive information like API keys, tokens, and credentials as plain text in source code repositories, sometimes without even realizing it. This creates an easy target for threat actors and puts your organization at risk.\n\n[Secret Push Protection](https://about.gitlab.com/blog/prevent-secret-leaks-in-source-code-with-gitlab-secret-push-protection/) directly addresses this problem by blocking developers from pushing code that contains secrets, significantly reducing the likelihood of a breach. 
It works by leveraging customizable rules to identify high-confidence secrets before they ever reach your repository.\n\nWhat makes this solution particularly powerful is its integration with our pipeline secret detection, creating a comprehensive defense strategy.\n\n_Secret Push Protection is now generally available for all [GitLab Ultimate tier](https://about.gitlab.com/pricing/ultimate/) and [GitLab Dedicated](https://about.gitlab.com/dedicated/) customers._\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"\nhttps://www.youtube.com/embed/SFVuKx3hwNI?si=aV_3Lazs2AiDH3Jf\" title=\"Introduction to Secret Push Protection\" frameborder=\"0\" title=\"GitLab Vulnerability Resolution\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## Put these features to work today\n\nAt GitLab, we’re committed to making it easier for teams to build software, faster. Capabilities like GitLab Duo Vulnerability Resolution, Model Registry, and Secret Push Protection are just a few of the recent innovations we’ve delivered to help developers and security teams level up their DevSecOps workflows. 
To learn more, check out our [releases page](https://about.gitlab.com/releases/categories/releases/).\n\n> Get started with these new features today with [a free trial of GitLab Ultimate](https://about.gitlab.com/free-trial/).\n",[951,9,695,717,693],{"slug":3564,"featured":91,"template":698},"secure-compliant-and-ai-powered-get-to-know-3-new-gitlab-features","content:en-us:blog:secure-compliant-and-ai-powered-get-to-know-3-new-gitlab-features.yml","Secure Compliant And Ai Powered Get To Know 3 New Gitlab Features","en-us/blog/secure-compliant-and-ai-powered-get-to-know-3-new-gitlab-features.yml","en-us/blog/secure-compliant-and-ai-powered-get-to-know-3-new-gitlab-features",{"_path":3570,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3571,"content":3577,"config":3583,"_id":3585,"_type":14,"title":3586,"_source":16,"_file":3587,"_stem":3588,"_extension":19},"/en-us/blog/securing-your-code-on-gitlab",{"title":3572,"description":3573,"ogTitle":3572,"ogDescription":3573,"noIndex":6,"ogImage":3574,"ogUrl":3575,"ogSiteName":685,"ogType":686,"canonicalUrls":3575,"schema":3576},"The ultimate guide to securing your code on GitLab.com","This in-depth tutorial, complete with best practices, will help you secure your development environment.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749669863/Blog/Hero%20Images/security-pipelines.jpg","https://about.gitlab.com/blog/securing-your-code-on-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"The ultimate guide to securing your code on GitLab.com\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Steve Grossman\"}],\n        \"datePublished\": \"2023-05-31\",\n      }",{"title":3572,"description":3573,"authors":3578,"heroImage":3574,"date":3580,"body":3581,"category":693,"tags":3582},[3579],"Steve Grossman","2023-05-31","\n\nA critical aspect of a DevSecOps methodology is to apply best practices to secure your 
development environment. Your software should be protected from malicious and accidental exposure or modification. This blog explains how to control and manage access to GitLab.com and, in turn, source code, build pipelines, dependency and package repositories, and deployment keys involved in the [software supply chain](/blog/the-ultimate-guide-to-software-supply-chain-security/). The best practices specifically address the capabilities for end users on multi-tenant GitLab.com and are written for the Ultimate license tier. Not all of these capabilities are available at the Premium tier.\n\n## 1. Group settings\nMany security-related settings can be set on the top-level group and will cascade down into all subgroups and projects. They are the easiest and most important in securing your GitLab.com instance.\n\n### General settings\nIn the top-level group, the following settings should be applied to provide the best security for the code within that group:\n\n#### Make the group visibility level private\nThis is likely the most important setting among general settings. By marking the group “private\", anyone who is not explicitly a member of the group will not be able to access it. Additionally, by making the top-level group private, all subgroups and projects will also be private and cannot be exposed.  \n\n#### Permissions and group features\nUnder permissions:\n - Set “Prevent members from sending invitations to outside groups”. This will prevent accidentally adding people who should not belong to the group.\n - Set “Prevent sharing a project with other groups”. This prevents accidental or malicious exfiltration of code by sharing or moving a project to another group outside the control of the top-level group owner.\n - Allow project and group access token creation. 
Project and group access tokens are much like [personal access tokens](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html) with the following improvements:\n    - They are visible to and manageable by group owners and maintainers, which means they can be revoked and have expiration dates set by an administrator to limit the opportunity for abuse.\n    - They create a virtual “bot” user that does not count against your license count.\n - Enable [delayed project deletion](/blog/delayed-deletion/). This will give you a seven-day grace period to catch and prevent accidental or malicious removal of a repo. GitLab.com, like self-managed GitLab, does not have the ability to restore an individual project without significant expense for professional services.\n - Allowlist the Classless Inter-Domain Routing (CIDR) or supernetting from which users should be accessing the code.\n - Restrict membership to only those email domains belonging to your organization and contractors.\n - Restrict creation of subgroups to Owners. This will help keep the structure of the top-level group within your policies and make [SAML Group Sync](https://docs.gitlab.com/ee/user/group/saml_sso/group_sync.html) for membership easier to manage.\n - Block forking projects outside of this group hierarchy. This will help prevent code exfiltration.\n - Require [two-factor authentication](https://docs.gitlab.com/ee/user/profile/account/two_factor_authentication.html). This disables the ability to use password authentication with Git over HTTPS.\n - Disallow adding new members to projects within this group. All members must be inherited from the group.  \n\n#### Merge request approvals\n[Merge request approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/) help prevent injection of malicious code into the repository by having people other than the author review them. 
Enable merge request approvals for all projects in your group to:  \n - Prevent approval by authors.\n - Prevent approvals by users who add commits.\n - Prevent editing approval rules in projects and on individual merge requests.  \n\n### SAML SSO\nTo more tightly control who can access your code in GitLab.com, set up [SAML SSO](https://docs.gitlab.com/ee/user/group/saml_sso/). This will ensure that everyone who accesses it is approved by someone in authority.\n\n#### To configure SAML SSO:  \n - Enable SAML authentication for this group.\n - Enforce SSO-only authentication for web activity for this group.\n - Enforce SSO-only authentication for Git and Dependency Proxy activity for this group.\n - Set the Default membership role to Minimal Access. Roles can be increased as needed in subgroups or individual projects, minimal access prevents any visibility to projects or subgroups where the user is not explicitly granted another role.\n - Tightly control access to the Maintainer and Owner roles; every developer does not need to have a Maintainer role.\n\n## 2. Group auditing and compliance\nRegularly and periodically review the [compliance reports](https://docs.gitlab.com/ee/user/compliance/compliance_report/) to verify who is approving merge requests and what MRs are getting approved.\n\nSet up streaming group audit events to your corporate security information and event management (SIEM) system and monitor them for unusual activity. This needs to be repeated for each group and project in the hierarchy to get the maximum number of audit events.\n\n## 3. Group-level push rules\n Setting restrictive push rules at the group level will help ensure malicious code is not injected into the repository:\n - Require committers be verified.\n - Reject unsigned commits.\n - Ensure the commit author is a GitLab user.\n - Prevent pushing secret files.\n - Require commit author’s email to be from your email domain.  
\n\n## CI/CD  \nThe following settings can help ensure the integrity of [CI/CD](/topics/ci-cd/) pipelines and reduce the opportunities for abuse and malice:\n - Register runners at the lowest practical level to reduce the blast radius of any malicious use.\n - Require tags to use all runners to reduce the opportunity for abuse.\n - Define CI/CD variables – especially if they contain secrets – at the lowest practical level to reduce the blast radius of any malicious use.\n - Use protected runners with protected variables and protected branches to significantly limit who can deploy into production environments or misuse cloud resources.\n - Access to change the .gitlab-ci.yml pipeline definition file should be tightly controlled in all repos through the CODEOWNERS file to prevent malicious use of the CI/CD system.\n\n## 4. Project settings  \nSome settings do not cascade down from the group or are not available at the group level and must be set on individual projects instead. These include some repo-specific settings.\n\n### Repository\nSet up protected branches and protected tags to go along with the protected runners and protected variables defined above.\n\n### CI/CD  \n#### General \n - Disable public pipelines.\n - Use separate caches for protected branches.  \n\n### Protected environments\nUse protected environments and tightly limit who can deploy and require approvals for deploying.\n\n#### Token access\nRestrict access to this project’s CI_JOB_TOKEN to only individual projects to ensure malicious projects do not retrieve the token and use it to access the API.\n\n#### Secure files\nStore keystores, provisioning profiles and signing certificates in the Secure Files storage rather than the repository.  \n\n## 5. 
Project-level security testing and compliance\n### Configuration\n#### Security testing\n - Enable static application security testing [SAST](https://docs.gitlab.com/ee/user/application_security/sast/) to help prevent insertion of malicious code into the application.\n - Enable dependency scanning and regularly review the dependency list of software, or software bill of materials ([SBOM](/blog/the-ultimate-guide-to-sboms/)), generated by dependency scanning for vulnerabilities and malicious components.\n - Enable [container scanning](https://docs.gitlab.com/ee/user/application_security/container_scanning/) and cluster image scanning.  \n\n#### Policies\nAs an alternative to the security testing section above, you may choose to enable scan execution policies.\nEnable [test scan result policies](https://docs.gitlab.com/ee/user/application_security/policies/scan-result-policies.html) to prevent merging code with critical vulnerabilities.\n\n\nFollowing these best practices will help ensure that your code hosted on GitLab.com is safe from tampering and \n[public exposure](https://www.engadget.com/okta-stolen-source-code-205601214.html) and that your \nsoftware supply chain is secure and only authorized users \nare accessing your software assets.\n\n## More resources\n- [Group level settings documentation](https://docs.gitlab.com/ee/user/group/)\n- [Project level settings documentation](https://docs.gitlab.com/ee/user/project/settings/)\n",[719,9],{"slug":3584,"featured":6,"template":698},"securing-your-code-on-gitlab","content:en-us:blog:securing-your-code-on-gitlab.yml","Securing Your Code On 
Gitlab","en-us/blog/securing-your-code-on-gitlab.yml","en-us/blog/securing-your-code-on-gitlab",{"_path":3590,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3591,"content":3596,"config":3602,"_id":3604,"_type":14,"title":3605,"_source":16,"_file":3606,"_stem":3607,"_extension":19},"/en-us/blog/self-service-security-alert-handling-with-gitlabs-uam",{"title":3592,"description":3593,"ogTitle":3592,"ogDescription":3593,"noIndex":6,"ogImage":3499,"ogUrl":3594,"ogSiteName":685,"ogType":686,"canonicalUrls":3594,"schema":3595},"Self-service security alert handling with GitLab's UAM","The User Attestation Module automates security alerts by routing them directly to team members for verification, reducing manual SecOps work and enhancing audit trails.","https://about.gitlab.com/blog/self-service-security-alert-handling-with-gitlabs-uam","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Self-service security alert handling with GitLab's UAM\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Bala Allam\"},{\"@type\":\"Person\",\"name\":\"Matt Coons\"}],\n        \"datePublished\": \"2025-03-18\"\n      }",{"title":3592,"description":3593,"authors":3597,"heroImage":3499,"date":3599,"body":3600,"category":693,"tags":3601},[3598,2843],"Bala Allam","2025-03-18","The [GitLab Security Operations team](https://handbook.gitlab.com/handbook/security/security-operations/) prioritizes automation that enables security engineers to focus on high-impact work rather than routine tasks that can be automated. A key innovation in this automation strategy is creation of the User Attestation Module (UAM), which allows GitLab team members to directly respond to and verify security alerts flagged as potentially malicious. 
When the [GUARD framework](https://about.gitlab.com/blog/automating-cybersecurity-threat-detections-with-gitlab-ci-cd/) detects suspicious activity, it routes the alert to the relevant team member for review. The team member can then attest whether they recognize and authorize the activity. Their response is recorded for audit purposes, and, based on their input, the system either closes the alert or escalates it to the Security Incident Response Team (SIRT).\n\nIn this article, you'll learn about the UAM and how it can benefit your DevSecOps environment.\n\n## How the User Attestation Module works\n\nThe UAM streamlines security alert handling through a comprehensive workflow that includes:\n\n* Alert verification by team members  \n* Collection and documentation of supporting evidence  \n* Option to request additional support from GitLab SecOps  \n* Secure storage of team member responses  \n* Automated alert resolution or incident escalation  \n* Team member feedback collection for continuous improvement\n\nWe created UAM to help us: \n\n1. Route low priority alerts (such as administrative activities) to the relevant team members who performed them.  \n2. Reduce alert fatigue by first checking with the team member who completed the activity before routing to SIRT if necessary.   \n3. Collect and store team member responses to maintain an audit trail and rich metrics.   \n4. Create a response tier between **SIRT needs to triage this alert** and **This is an informational signal that does not need to be reviewed directly**.\n\n## UAM's design principles\n\nThe UAM is a Slack-first automation that reaches out to team members to validate activity directly in Slack, reducing effort and increasing participation. Today, 40% of all security alerts are delivered to team members through the UAM, saving SIRT valuable time to focus on higher importance alerts and incidents. 
\n\nA robust escalation workflow in the UAM ensures that all alerts are validated by team members or escalated to SIRT. When a UAM alert reaches a team member, they have a period of time to respond attesting to the activity or stating they do not recognize the activity. If no response is recorded, the UAM alert is auto-escalated to SIRT for handling. \n\nComprehensive metrics collection is a core GUARD design principle, which extends to how we designed UAM. All user interactivity with triggered UAM alerts is logged in a metrics database, which enables comprehensive measurement to identify problematic alerts, opportunities for process improvement, and overall UAM health.\n\nUAM enables a third alert tier, bridging the gap between alerts that always needed to be investigated, and lower importance informational signals that are grouped by entity for escalation and correlation. \n\n- Stable alerts (must be triaged and investigated by SIRT)  \n- UAM alerts (routed to team members to attest to the activity)  \n- Informational signal (low-importance events that are interesting and correlated by entity grouping)\n\n## UAM components \n\nThe UAM framework consists of multiple components:\n\n- GitLab: Fetches a user email address based on user_id via user’s API and stores user's responses  \n- Slack: Searches each user by email using Slack API and posts a UAM notification to the end user as well as collects responses from users using Slack modals  \n- Tines: Processes and orchestrates user responses and alerts     \n- Devo: Receives alert payload and alert notifications  \n- Metrics DB: Records metrics for triggered UAM alerts\n\nThe workflow integrates with the following modules:\n\n- GitLab API for user identification  \n- Slack API for user communication  \n- Webhook configuration for alert reception  \n- Audit trail storage in GitLab\n\n## UAM workflow\n\nThe diagram below illustrates the workflow of the UAM module:\n\n![UAM - flow 
chart](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674678/Blog/Content%20Images/UAM_detection_edited.png)\n\n## Following along with GUARD\n\nWe are still unveiling parts of GUARD and how it works, so [follow along](https://gitlab.com/gitlab-security-oss/guard) to learn how we automate our security detections from end to end.\n\n## Read more about the GUARD framework\n\n- [Unveiling the GUARD framework to automate security detections at GitLab](https://about.gitlab.com/blog/unveiling-the-guard-framework-to-automate-security-detections-at-gitlab/)  \n- [Automating cybersecurity threat detections with GitLab CI/CD](https://about.gitlab.com/blog/automating-cybersecurity-threat-detections-with-gitlab-ci-cd/)\n- [Open Source Security at GitLab](https://about.gitlab.com/security/open-source-resources/)",[693,719,9,717,2728],{"slug":3603,"featured":6,"template":698},"self-service-security-alert-handling-with-gitlabs-uam","content:en-us:blog:self-service-security-alert-handling-with-gitlabs-uam.yml","Self Service Security Alert Handling With Gitlabs Uam","en-us/blog/self-service-security-alert-handling-with-gitlabs-uam.yml","en-us/blog/self-service-security-alert-handling-with-gitlabs-uam",{"_path":3609,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3610,"content":3615,"config":3621,"_id":3623,"_type":14,"title":3624,"_source":16,"_file":3625,"_stem":3626,"_extension":19},"/en-us/blog/set-up-infrastructure-for-cloud-development-environments",{"title":3611,"description":3612,"ogTitle":3611,"ogDescription":3612,"noIndex":6,"ogImage":1513,"ogUrl":3613,"ogSiteName":685,"ogType":686,"canonicalUrls":3613,"schema":3614},"Cloud infrastructure for on-demand development in GitLab","Learn how to set up the requirements, manage Kubernetes clusters in different clouds, create the first workspaces and custom images, and get tips and troubleshooting.","https://about.gitlab.com/blog/set-up-infrastructure-for-cloud-development-environments","\n                       
 {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Set up your infrastructure for on-demand, cloud-based development environments in GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Michael Friedrich\"}],\n        \"datePublished\": \"2023-07-13\"\n      }",{"title":3616,"description":3612,"authors":3617,"heroImage":1513,"date":3618,"body":3619,"category":928,"tags":3620},"Set up your infrastructure for on-demand, cloud-based development environments in GitLab",[1379],"2023-07-13","Cloud-based development environments enable a better developer onboarding experience and help make teams more efficient. In this tutorial, you'll learn how to ready your infrastructure for on-demand, cloud-based development environments. You'll also learn how to set up the requirements, manage Kubernetes clusters in different clouds, create your first workspaces and custom images, and get tips for troubleshooting.\n\nThe GitLab agent for Kubernetes, an OAuth GitLab app, and a proxy pod deployment make the setup reproducible in different Kubernetes cluster environments and follow cloud-native best practices. Bringing your infrastructure allows platform teams to store the workspace data securely, control resource usage, harden security, and troubleshoot the deployments in known ways.\n\nThis blog post is a long read so feel free to navigate to the sections of interest. 
However, if you want to follow the tutorial step by step, the sections depend on one another for the parts pertaining to infrastructure setup.\n\n- [Development environments on your infrastructure](#development-environments-on-your-infrastructure)\n- [Requirements](#requirements)\n    - [Workspaces domain](#workspaces-domain)\n    - [TLS certificates](#tls-certificates)\n- [GitLab OAuth application](#gitlab-oauth-application)\n- [Kubernetes cluster setup](#kubernetes-cluster-setup)\n    - [Set up infrastructure with Google Kubernetes Engine (GKE)](#set-up-infrastructure-with-google-kubernetes-engine-gke)\n    - [Set up infrastructure with Amazon Elastic Kubernetes Service (EKS)](#set-up-infrastructure-with-amazon-elastic-kubernetes-service-eks)\n    - [Set up infrastructure with Azure Managed Kubernetes Service (AKS)](#set-up-infrastructure-with-azure-managed-kubernetes-service-aks)\n    - [Set up infrastructure with Civo Cloud Kubernetes](#set-up-infrastructure-with-civo-cloud-kubernetes)\n    - [Set up infrastructure with self-managed Kubernetes](#set-up-infrastructure-with-self-managed-kubernetes)\n- [Workspaces proxy installation into Kubernetes](#workspaces-proxy-installation-into-kubernetes)\n- [Agent for Kubernetes installation](#agent-for-kubernetes-installation)\n- [Workspaces creation](#workspaces-creation)\n    - [Create the first workspaces](#create-the-first-workspaces)\n    - [Custom workspace container images](#custom-workspace-container-images)\n- [Tips](#tips)\n    - [Certificate management](#certificate-management)\n    - [Troubleshooting](#troubleshooting)\n    - [Contribute](#contribute)\n- [Share your feedback](#share-your-feedback)\n\n## Development environments on your infrastructure\nSecure, on-demand, cloud-based development workspaces are [available in beta for public projects](/blog/introducing-workspaces-beta/) for Premium and Ultimate customers. The first iteration allows you to bring your own infrastructure as a Kubernetes cluster. 
GitLab already deeply integrates with Kubernetes through the GitLab agent for Kubernetes, setting the foundation for configuration and cluster management.\n\nUsers can define and use a development environment template in a project. Workspaces in GitLab support the [devfile specification](https://docs.gitlab.com/ee/user/workspace/#devfile) as `.devfile.yaml` in the project repository root. The devfile attributes allow configuring of the workspace. For example, the `image` attribute specifies the container image to run and create the workspace in isolated container environments. The containers require a cluster orchestrator, such as Kubernetes, that manages resource usage and ensures data security and safety. Workspaces also need authorization: Project source code may contain sensitive intellectual property or otherwise confidential data in specific environments. The setup requires a GitLab OAuth application as the foundation here.\n\nThe following steps provide an in-depth setup guide for different cloud providers. If you prefer to set up your own environment, please follow the [documentation for workspace prerequisites](https://docs.gitlab.com/ee/user/workspace/#prerequisites). In general, we will practice the following steps:\n0. (Optional) Register a workspaces domain, and create TLS certificates.\n1. Create a Kubernetes cluster and configure access and requirements.\n2. Install an Ingress controller.\n3. Set up the workspaces proxy with the domain, TLS certificates, and OAuth app.\n4. Create a new GitLab group with a GitLab agent project. The agent can be used for all projects in that group.\n5. Install the GitLab agent for Kubernetes using the UI provided Helm chart command.\n6. Create an example project with a devfile configuration for workspaces.\n\nSome commands do not use the terminal indicator (`$` or `#`) to support easier copy-paste of command blocks into terminals.\n\n## Requirements\nThe steps in this blog post require the following CLI tools:\n1. 
`kubectl` and `helm` for Kubernetes\n2. `certbot` for Let's Encrypt\n3. git, curl, dig, openssl, and sslscan for troubleshooting\n\n### Workspaces domain\nWorkspaces require a domain with DNS entries. Cloud providers, for example, Google Cloud, also provide domain services which integrate more easily. You can also register and manage domains with your preferred provider.\n\nThe required DNS entries will be:\n- Wildcard DNS (`*.remote-dev.dev`) and hostname (`remote-dev.dev`) A/AAAA records pointing to the external Kubernetes external IP: `kubectl get services -A`\n- (Optional, with Let's Encrypt) ACME DNS challenge entries as TXT records\n\nAfter acquiring a domain, wait until the Kubernetes setup is ready and extract the A/AAAA records for the DNS settings. The following example shows how `remote-dev.dev` is configured in the Google Cloud DNS service.\n\n![GitLab remote development workspaces, example DNS configuration for remote-dev.dev](https://about.gitlab.com/images/blogimages/infrastructure-cloud-development-environments/gitlab_remote_dev_workspaces_google_cloud_dns_remote-dev.dev-entries.png){: .shadow}\n\nExport shell variables that define the workspaces domains, and the email contact. These variables will be used in all setup steps below.\n\n```\nexport EMAIL=\"user@company.com\"\nexport GITLAB_WORKSPACES_PROXY_DOMAIN=\"remote-dev.dev\"\nexport GITLAB_WORKSPACES_WILDCARD_DOMAIN=\"*.remote-dev.dev\"\n```\n\n**Note:** This blog post will show the example domain `remote-dev.dev` for better understanding with a working example. The domain `remote-dev.dev` is maintained by the [Developer Evangelism team at GitLab](/handbook/marketing/developer-relations/developer-evangelism/projects/). There are no public demo environments available at the time of writing this blog post.\n\n### TLS certificates\nTLS certificates can be managed with different methods. 
To get started quickly, it is recommended to follow the [documentation steps](https://docs.gitlab.com/ee/user/workspace/#prerequisites) with Let's Encrypt and later consider production requirements with TLS certificates.\n\n```shell\ncertbot -d \"${GITLAB_WORKSPACES_PROXY_DOMAIN}\" \\\n  -m \"${EMAIL}\" \\\n  --config-dir ~/.certbot/config \\\n  --logs-dir ~/.certbot/logs \\\n  --work-dir ~/.certbot/work \\\n  --manual \\\n  --preferred-challenges dns certonly\n\n  certbot -d \"${GITLAB_WORKSPACES_WILDCARD_DOMAIN}\" \\\n  -m \"${EMAIL}\" \\\n  --config-dir ~/.certbot/config \\\n  --logs-dir ~/.certbot/logs \\\n  --work-dir ~/.certbot/work \\\n  --manual \\\n  --preferred-challenges dns certonly\n```\n\nThe Let's Encrypt CLI prompts you for the ACME DNS challenge. This requires setting TXT records for the challenge session immediately. Add the DNS records and specify a low TTL (time-to-live) of 300 seconds to update the records during the first steps.\n\n```\n_acme-challenge TXT \u003Cstringfromletsencryptacmechallenge>\n```\n\nYou can verify the DNS records using the `dig` CLI command.\n\n```shell\n$ dig _acme-challenge.remote-dev.dev txt\n...\n;; ANSWER SECTION:\n_acme-challenge.remote-dev.dev.\t246 IN\tTXT\t\"TlGRM9JGdXHGVklPWgytflxWDF82Sv04nF--Wl9JFvg\"\n_acme-challenge.remote-dev.dev.\t246 IN\tTXT\t\"CqG_54w6I0heWF3wLMAmUAitPcUMs9qAU9b8QhBWFj8\"\n```\n\nOnce the Let's Encrypt routine is complete, note the TLS certificate location.\n\n```\nSuccessfully received certificate.\nCertificate is saved at: /Users/mfriedrich/.certbot/config/live/remote-dev.dev/fullchain.pem\nKey is saved at:         /Users/mfriedrich/.certbot/config/live/remote-dev.dev/privkey.pem\nThis certificate expires on 2023-08-15.\nThese files will be updated when the certificate renews.\n\nSuccessfully received certificate.\nCertificate is saved at: /Users/mfriedrich/.certbot/config/live/remote-dev.dev-0001/fullchain.pem\nKey is saved at:         
/Users/mfriedrich/.certbot/config/live/remote-dev.dev-0001/privkey.pem\nThis certificate expires on 2023-08-15.\nThese files will be updated when the certificate renews.\n```\n\nExport the TLS certificate paths into environment variables for the following setup steps.\n\n```shell\nexport WORKSPACES_DOMAIN_CERT=\"${HOME}/.certbot/config/live/${GITLAB_WORKSPACES_PROXY_DOMAIN}/fullchain.pem\"\nexport WORKSPACES_DOMAIN_KEY=\"${HOME}/.certbot/config/live/${GITLAB_WORKSPACES_PROXY_DOMAIN}/privkey.pem\"\n\nexport WILDCARD_DOMAIN_CERT=\"${HOME}/.certbot/config/live/${GITLAB_WORKSPACES_PROXY_DOMAIN}-0001/fullchain.pem\"\nexport WILDCARD_DOMAIN_KEY=\"${HOME}/.certbot/config/live/${GITLAB_WORKSPACES_PROXY_DOMAIN}-0001/privkey.pem\"\n```\n\n**Note**: If you prefer to use your certificates, please copy the files into a safe location, and export the environment variables with the path details.\n\n## GitLab OAuth application\n_After preparing the requirements, continue with the components setup._\n\nCreate a [group-owned OAuth application](https://docs.gitlab.com/ee/integration/oauth_provider.html) for the remote development workspaces group. Creating a centrally managed app with a service account or group with limited access is recommended for production use.\n\nNavigate into the group `Settings > Applications` and specify the following values:\n\n1. Name: `Remote Development workspaces by \u003Cresponsible team> - \u003Cdomain>`. Add the responsible team that is trusted in your organization. For debugging, add the domain. There might be multiple authorization groups; this helps identify which workspace domain is used.\n2. Redirect URI: `https://\u003CGITLAB_WORKSPACES_PROXY_DOMAIN>/auth/callback`. Replace `GITLAB_WORKSPACES_PROXY_DOMAIN` with the domain string value.\n3. 
Set the scopes to `api, read_user, openid, profile` .\n\n![GitLab remote development workspaces, OAuth application in the group settings](https://about.gitlab.com/images/blogimages/infrastructure-cloud-development-environments/gitlab_remote_dev_workspaces_oauth_app_create.png){: .shadow}\n\nStore the OAuth application details in your password vault, and export them as shell environment variables for the next setup steps.\n\nCreate a configuration secret for the proxy as a signing key (`SIGNING_KEY`), and store it in a safe place (for example, use a secrets vault like 1Password to create and store the key).\n\n```\nexport CLIENT_ID=\"XXXXXXXXX\" # Look into password vault and set\nexport CLIENT_SECRET=\"XXXXXXXXXX\" # Look into password vault and set\nexport REDIRECT_URI=\"https://${GITLAB_WORKSPACES_PROXY_DOMAIN}/auth/callback\"\n\nexport GITLAB_URL=\"https://gitlab.com\" # Replace with your self-managed GitLab instance URL if not using GitLab.com SaaS\nexport SIGNING_KEY=\"a_random_key_consisting_of_letters_numbers_and_special_chars\" # Look into password vault and set\n```\n\n## Kubernetes cluster setup\nThe following sections describe how to set up a Kubernetes cluster in different cloud and on-premises environments and install an [ingress controller](https://kubernetes.io/docs/concepts/services-networking/ingress-controllers/) for HTTP access. After completing the Kubernetes setup, you can continue with the workspaces proxy and agent setup steps.\n\n**Choose one method to create a Kubernetes cluster. 
Note: Use `amd64` as platform architecture [until multi-architecture support is available for running workspaces](https://gitlab.com/groups/gitlab-org/-/epics/10594).** Cloud environments with Arm support will not work yet, for example AWS EKS on Graviton EC2 instances.\n\nYou should have defined the following variables from the previous setup steps:\n\n```sh\nexport EMAIL=\"user@company.com\"\nexport GITLAB_WORKSPACES_PROXY_DOMAIN=\"remote-dev.dev\"\nexport GITLAB_WORKSPACES_WILDCARD_DOMAIN=\"*.remote-dev.dev\"\n\nexport WORKSPACES_DOMAIN_CERT=\"${HOME}/.certbot/config/live/${GITLAB_WORKSPACES_PROXY_DOMAIN}/fullchain.pem\"\nexport WORKSPACES_DOMAIN_KEY=\"${HOME}/.certbot/config/live/${GITLAB_WORKSPACES_PROXY_DOMAIN}/privkey.pem\"\n\nexport WILDCARD_DOMAIN_CERT=\"${HOME}/.certbot/config/live/${GITLAB_WORKSPACES_PROXY_DOMAIN}-0001/fullchain.pem\"\nexport WILDCARD_DOMAIN_KEY=\"${HOME}/.certbot/config/live/${GITLAB_WORKSPACES_PROXY_DOMAIN}-0001/privkey.pem\"\n\nexport CLIENT_ID=\"XXXXXXXXX\" # Look into password vault and set\nexport CLIENT_SECRET=\"XXXXXXXXXX\" # Look into password vault and set\nexport REDIRECT_URI=\"https://${GITLAB_WORKSPACES_PROXY_DOMAIN}/auth/callback\"\n\nexport GITLAB_URL=\"https://gitlab.com\" # Replace with your self-managed GitLab instance URL if not using GitLab.com SaaS\nexport SIGNING_KEY=\"XXXXXXXX\" # Look into password vault and set\n\n```\n\n### Set up infrastructure with Google Kubernetes Engine (GKE)\n\n[Install and configure the Google Cloud SDK and `gcloud` CLI](https://cloud.google.com/sdk/docs/install?hl=en), and install the `gke-gcloud-auth-plugin` plugin to authenticate against Google Cloud.\n\n```shell\nbrew install --cask google-cloud-sdk\n\ngcloud components install gke-gcloud-auth-plugin\n\ngcloud auth login\n```\n\nCreate a new GKE cluster using the `gcloud` command, or follow the steps in the Google Cloud Console.\n\n```shell\n\nexport GCLOUD_PROJECT=group-community\nexport 
GCLOUD_CLUSTER=de-remote-development-1\n\ngcloud config set project $GCLOUD_PROJECT\n\n# Create cluster (modify for your needs)\ngcloud container clusters create $GCLOUD_CLUSTER \\\n    --release-channel stable \\\n    --zone us-central1-c \\\n    --project $GCLOUD_PROJECT\n\n# Verify cluster\ngcloud container clusters list\n\nNAME                     LOCATION         MASTER_VERSION   MASTER_IP       MACHINE_TYPE  NODE_VERSION       NUM_NODES  STATUS\nde-remote-development-1  us-central1-c    1.26.3-gke.1000  34.136.33.199   e2-medium     1.26.3-gke.1000    3          RUNNING\n\ngcloud container clusters get-credentials $GCLOUD_CLUSTER --zone us-central1-c --project $GCLOUD_PROJECT\nFetching cluster endpoint and auth data.\nkubeconfig entry generated for de-remote-development-1.\n```\n\n1. The setup requires the [`Kubernetes Engine Admin` role in Google IAM](https://cloud.google.com/kubernetes-engine/docs/concepts/access-control?hl=en#recommendations) to create ClusterRoleBindings.\n2. Create a new Kubernetes cluster (do not use Autopilot).\n3. Ensure that [cluster autoscaling](https://cloud.google.com/kubernetes-engine/docs/concepts/cluster-autoscaler?hl=en) is enabled in the GKE cluster.\n4. Verify that a [default Storage Class](https://cloud.google.com/kubernetes-engine/docs/concepts/persistent-volumes?hl=en#storageclasses) has been defined.\n5. Install an Ingress controller, for example [ingress-nginx](https://kubernetes.github.io/ingress-nginx/deploy/#gce-gke). 
Follow the documentation and run the following commands to install `ingress-nginx` into the Kubernetes cluster.\n\n```shell\nkubectl create clusterrolebinding cluster-admin-binding \\\n  --clusterrole cluster-admin \\\n  --user $(gcloud config get-value account)\n\nkubectl apply -f https://raw.githubusercontent.com/kubernetes/ingress-nginx/controller-v1.7.1/deploy/static/provider/cloud/deploy.yaml\n```\n\nPrint the external IP for the DNS records, and update wildcard DNS (`*.remote-dev.dev`) and hostname (`remote-dev.dev`).\n\n```shell\ngcloud container clusters list\n\nkubectl get services -A\n```\n\n### Set up infrastructure with Amazon Elastic Kubernetes Service (EKS)\nCreating an Amazon EKS cluster requires [cluster IAM roles](https://docs.aws.amazon.com/eks/latest/userguide/create-cluster.html). You can use the [`eksctl` CLI for Amazon EKS](https://eksctl.io/), which automatically creates the roles. `eksctl` [requires the AWS IAM Authenticator for Kubernetes](https://github.com/weaveworks/eksctl/blob/main/README.md#prerequisite), which will get pulled with Homebrew automatically on macOS.\n\n```shell\nbrew install eksctl awscli aws-iam-authenticator\naws configure\n\neksctl create cluster --name remote-dev \\\n    --region us-west-2 \\\n    --node-type m5.xlarge \\\n    --nodes 3 \\\n    --nodes-min=1 \\\n    --nodes-max=4 \\\n    --version=1.26 \\\n    --asg-access\n```\n\nThe eksctl command uses the [`--asg-access`, `--nodes-min/max` parameters for auto-scaling](https://eksctl.io/usage/autoscaling/). The autoscaler requires [additional configuration steps](https://github.com/kubernetes/autoscaler/blob/master/cluster-autoscaler/cloudprovider/aws/README.md); alternatively, [Karpenter is supported in Amazon EKS](https://karpenter.sh/docs/getting-started/getting-started-with-karpenter/). 
Review the [autoscaling documentation](https://docs.aws.amazon.com/eks/latest/userguide/autoscaling.html), and [default Storage Class `gp2`](https://docs.aws.amazon.com/eks/latest/userguide/storage-classes.html) fulfilling the requirements. The Kubernetes configuration is automatically updated locally.\n\nInstall the [Nginx Ingress controller for EKS](https://kubernetes.github.io/ingress-nginx/deploy/#aws). Follow the documentation and run the following command to install `ingress-nginx` into the Kubernetes cluster.\n\n```shell\nkubectl apply -f https://raw.githubusercontent.com/kubernetes/ingress-nginx/controller-v1.8.0/deploy/static/provider/aws/deploy.yaml\n```\n\nPrint the external IP for the DNS records, and update wildcard DNS (`*.remote-dev.dev`) and hostname (`remote-dev.dev`).\n\n```shell\neksctl get cluster --region us-west-2 --name remote-dev\n\nkubectl get services -A\n```\n\n### Set up infrastructure with Azure Managed Kubernetes Service (AKS)\nInstall [Azure CLI](https://learn.microsoft.com/en-us/azure/aks/learn/quick-kubernetes-deploy-cli).\n\n```shell\nbrew install azure-cli\n\naz login\n```\n\nReview the documentation for the [cluster autoscaler in AKS](https://learn.microsoft.com/en-us/azure/aks/cluster-autoscaler) and the [default Storage Class being `managed-csi`](https://learn.microsoft.com/en-us/azure/aks/concepts-storage#storage-classes), create a new resource group, and create a new Kubernetes cluster. 
Download the Kubernetes configuration to continue with the `kubectl` commands.\n\n```shell\naz group create --name remote-dev-rg --location eastus\n\naz aks create \\\n--resource-group remote-dev-rg \\\n--name remote-dev \\\n--node-count 1 \\\n--vm-set-type VirtualMachineScaleSets \\\n--load-balancer-sku standard \\\n--enable-cluster-autoscaler \\\n--min-count 1 \\\n--max-count 3\n\naz aks get-credentials --resource-group remote-dev-rg --name remote-dev\n```\n\nInstall the [Nginx ingress controller in AKS](https://learn.microsoft.com/en-us/azure/aks/ingress-basic?tabs=azure-cli#basic-configuration). Follow the documentation and run the following commands to install `ingress-nginx` into the Kubernetes cluster.\n\n```shell\nNAMESPACE=ingress-basic\n\nhelm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx\nhelm repo update\n\nhelm install ingress-nginx ingress-nginx/ingress-nginx \\\n  --create-namespace \\\n  --namespace $NAMESPACE \\\n  --set controller.service.annotations.\"service\\.beta\\.kubernetes\\.io/azure-load-balancer-health-probe-request-path\"=/healthz\n```\n\nPrint the external IP for the DNS records, and update wildcard DNS (`*.remote-dev.dev`) and hostname (`remote-dev.dev`).\n\n```shell\nkubectl get services --namespace ingress-basic -o wide -w ingress-nginx-controller\n\nkubectl get services -A\n```\n\n### Set up infrastructure with Civo Cloud Kubernetes\nInstall and configure the [Civo CLI](https://www.civo.com/docs/kubernetes/create-a-cluster#creating-a-cluster-using-civo-cli), and create a Kubernetes cluster using 2 nodes, 4 CPUs, 8 GB RAM.\n\n```shell\ncivo kubernetes create remote-dev -n 2 -s g4s.kube.large\n\ncivo kubernetes config remote-dev --save\nkubectl config use-context remote-dev\n```\n\nYou have full permissions on the cluster to create ClusterRoleBindings. 
The [default Storage Class](https://www.civo.com/docs/kubernetes/kubernetes-volumes#creating-a-persistent-volume-claim-pvc) is set to 'civo-volume'.\n\nInstall the [Nginx Ingress controller using Helm](https://kubernetes.github.io/ingress-nginx/deploy/#quick-start). Follow the documentation and run the following command to install `ingress-nginx` into the Kubernetes cluster.\n\n```shell\nhelm upgrade --install ingress-nginx ingress-nginx \\\n  --repo https://kubernetes.github.io/ingress-nginx \\\n  --namespace ingress-nginx --create-namespace\n\n```\n\nPrint the external IP for the DNS records, and update wildcard DNS (`*.remote-dev.dev`) and hostname (`remote-dev.dev`).\n\n```shell\ncivo kubernetes show remote-dev\n\nkubectl get services -A\n```\n\n### Set up infrastructure with self-managed Kubernetes\nThe process follows similar steps, requiring a user with permission to create `ClusterRoleBinding` resources. The [Nginx Ingress controller](https://kubernetes.github.io/ingress-nginx/deploy/#quick-start) is the fastest path forward. Once the cluster is ready, print the load balancer IP for the DNS records, and create/update A/AAAA record for wildcard DNS (`*.remote-dev.dev`) and hostname (`remote-dev.dev`) pointing to the load balancer IP.\n\n## Workspaces proxy installation into Kubernetes\n_After completing the Kubernetes cluster setup with one of your preferred providers, please continue with the next steps._\n\nAdd the Helm repository for the workspaces proxy (it is using the [Helm charts feature in the GitLab package registry](https://docs.gitlab.com/ee/user/packages/helm_repository/)).\n\n```shell\nhelm repo add gitlab-workspaces-proxy \\\n  https://gitlab.com/api/v4/projects/gitlab-org%2fremote-development%2fgitlab-workspaces-proxy/packages/helm/devel\n```\n\nInstall the gitlab-workspaces-proxy, and optionally [specify the most current chart version](https://gitlab.com/gitlab-org/remote-development/gitlab-workspaces-proxy/-/blob/main/helm/Chart.yaml). 
If you are using a different ingress controller than Nginx, you need to change the `ingress.className` key. Re-run the command when new TLS certificates need to be installed.\n\n```shell\nhelm repo update\n\nhelm upgrade --install gitlab-workspaces-proxy \\\n  gitlab-workspaces-proxy/gitlab-workspaces-proxy \\\n  --version 0.1.6 \\\n  --namespace=gitlab-workspaces \\\n  --create-namespace \\\n  --set=\"auth.client_id=${CLIENT_ID}\" \\\n  --set=\"auth.client_secret=${CLIENT_SECRET}\" \\\n  --set=\"auth.host=${GITLAB_URL}\" \\\n  --set=\"auth.redirect_uri=${REDIRECT_URI}\" \\\n  --set=\"auth.signing_key=${SIGNING_KEY}\" \\\n  --set=\"ingress.host.workspaceDomain=${GITLAB_WORKSPACES_PROXY_DOMAIN}\" \\\n  --set=\"ingress.host.wildcardDomain=${GITLAB_WORKSPACES_WILDCARD_DOMAIN}\" \\\n  --set=\"ingress.tls.workspaceDomainCert=$(cat ${WORKSPACES_DOMAIN_CERT})\" \\\n  --set=\"ingress.tls.workspaceDomainKey=$(cat ${WORKSPACES_DOMAIN_KEY})\" \\\n  --set=\"ingress.tls.wildcardDomainCert=$(cat ${WILDCARD_DOMAIN_CERT})\" \\\n  --set=\"ingress.tls.wildcardDomainKey=$(cat ${WILDCARD_DOMAIN_KEY})\" \\\n  --set=\"ingress.className=nginx\"\n```\n\nThe chart installs and configures the ingress automatically. You can verify the setup by getting the `Ingress` resource type:\n\n```shell\nkubectl get ingress -n gitlab-workspaces\n\nNAME                      CLASS   HOSTS                             ADDRESS   PORTS     AGE\ngitlab-workspaces-proxy   nginx   remote-dev.dev,*.remote-dev.dev             80, 443   9s\n```\n\n### Agent for Kubernetes installation\nCreate the agent configuration file in `.gitlab/agents/\u003Cagentname>/config.yaml`, add to git, and push it into the repository. The `remote_development` key specifies the `dns_zone`, which must be set to the workspaces domain. Additionally, the integration needs to be enabled. 
The `observability` key intentionally configures [debug logging](https://docs.gitlab.com/ee/user/clusters/agent/work_with_agent.html#debug-the-agent) for the first setup to troubleshoot faster. You can adjust the `logging` levels for production usage.\n\n```shell\nexport GL_AGENT_K8S=remote-dev-dev\n\n$ mkdir agent-kubernetes && cd agent-kubernetes\n$ mkdir -p .gitlab/agents/${GL_AGENT_K8S}/\n\n$ cat \u003C\u003CEOF >.gitlab/agents/${GL_AGENT_K8S}/config.yaml\nremote_development:\n    enabled: true\n    dns_zone: \"${GITLAB_WORKSPACES_PROXY_DOMAIN}\"\n\nobservability:\n  logging:\n    level: debug\n    grpc_level: warn\nEOF\n\n$ git add .gitlab/agents/${GL_AGENT_K8S}/config.yaml\n$ git commit -avm \"Add agent for Kubernetes configuration\"\n# adjust the URL to your GitLab server URL and project path\n$ git remote add origin https://gitlab.example.com/remote-dev-workspaces/agent-kubernetes.git\n# will create a private project when https/PAT is used\n$ git push\n```\n\nOpen the GitLab project in your browser, navigate into `Operate > Kubernetes Clusters`, and click the `Connect a new cluster (agent)` button. Select the agent from the configuration dropdown, and click `Register`. The form generates a ready-to-use Helm chart CLI command. Similar to the command below, replace `XXXXXXXXXXREPLACEME` with the actual token value.\n\n```shell\nhelm repo add gitlab https://charts.gitlab.io\nhelm repo update\nhelm upgrade --install remote-dev-dev gitlab/gitlab-agent \\\n    --namespace gitlab-agent-remote-dev-dev \\\n    --create-namespace \\\n    --set image.tag=v16.0.1 \\\n    --set config.token=XXXXXXXXXXREPLACEME \\\n    --set config.kasAddress=wss://kas.gitlab.com # Replace with your self-managed GitLab KAS instance URL if not using GitLab.com SaaS\n```\n\nRun the commands, and verify that the agent is connected in the `Operate > Kubernetes Clusters` overview. 
You can access the pod logs using the following command:\n\n```shell\n$ kubectl get ns\nNAME                          STATUS   AGE\ngitlab-agent-remote-dev-dev   Active   9d\ngitlab-workspaces             Active   22d\n...\n\n$ kubectl logs -f -l app.kubernetes.io/name=gitlab-agent -n gitlab-agent-$GL_AGENT_K8S\n```\n\n_Congrats! Your infrastructure setup for on-demand, cloud-based development environments is complete._\n\n## Workspaces creation\nAfter completing the infrastructure setup, you must verify that all components work together and users can create workspaces. You can fork or import the [`example-python-http-simple` project](https://gitlab.com/gitlab-de/use-cases/remote-development/example-python-http-simple) into your GitLab group with access to the GitLab agent for Kubernetes to try it immediately. The project provides a simple Python web app with Flask that provides different HTTP routes. Alternatively, start with a new project and create a `.devfile.yaml` with the [example configuration](https://docs.gitlab.com/ee/user/workspace/#example-configurations).\n\nOptional: Inspect the [`.devfile.yaml`](https://docs.gitlab.com/ee/user/workspace/#devfile) file to learn about the configuration format. We will look into the `image` key later.\n\n```yaml\nschemaVersion: 2.2.0\ncomponents:\n  - name: py\n    attributes:\n      gl/inject-editor: true\n    container:\n      # Use a custom image that supports arbitrary user IDs.\n      # NOTE: THIS IMAGE IS NOT ACTIVELY MAINTAINED. 
DEMO USE CASES ONLY, DO NOT USE IN PRODUCTION.\n      # Source: https://gitlab.com/gitlab-de/use-cases/remote-development/container-images/python-remote-dev-workspaces-user-id\n      image: registry.gitlab.com/gitlab-de/use-cases/remote-development/container-images/python-remote-dev-workspaces-user-id:latest\n      memoryRequest: 1024M\n      memoryLimit: 2048M\n      cpuRequest: 500m\n      cpuLimit: 1000m\n      endpoints:\n        - name: http-python\n          targetPort: 8080\n```\n\n### Create the first workspaces\nNavigate to the `Your Work > Workspaces` menu and create a new workspace. Search for the project name, select the agent for Kubernetes, and create the workspace.\n\n![GitLab remote development workspaces, Python example](https://about.gitlab.com/images/blogimages/infrastructure-cloud-development-environments/gitlab_remote_dev_workspaces_python.png){: .shadow}\n\nOpen two terminals to follow the workspaces proxy and agent logs in the Kubernetes cluster.\n\n```shell\n$ kubectl logs -f -l app.kubernetes.io/name=gitlab-workspaces-proxy -n gitlab-workspaces\n\n{\"level\":\"info\",\"ts\":1686331102.886607,\"caller\":\"server/server.go:74\",\"msg\":\"Starting proxy server...\"}\n{\"level\":\"info\",\"ts\":1686331133.146862,\"caller\":\"upstream/tracker.go:47\",\"msg\":\"New upstream added\",\"host\":\"8080-workspace-62029-5534214-2vxdxq.remote-dev.dev\",\"backend\":\"workspace-62029-5534214-2vxdxq.gl-rd-ns-62029-5534214-2vxdxq\",\"backend_port\":8080}\n2023/06/09 17:21:10 getHostnameFromState state=https://60001-workspace-62029-5534214-2vxdxq.remote-dev.dev/folder=/projects/demo-python-http-simple\n```\n\n```shell\n$ kubectl logs -f -l app.kubernetes.io/name=gitlab-agent -n gitlab-agent-$GL_AGENT_K8S\n\n{\"level\":\"debug\",\"time\":\"2023-06-09T18:36:19.839Z\",\"msg\":\"Applied event\",\"mod_name\":\"remote_development\",\"apply_event\":\"WaitEvent{ GroupName: \\\"wait-0\\\", Status: \\\"Pending\\\", Identifier: 
\\\"gl-rd-ns-62029-5534214-k66cjy_workspace-62029-5534214-k66cjy-gl-workspace-data__PersistentVolumeClaim\\\" }\",\"agent_id\":62029}\n{\"level\":\"debug\",\"time\":\"2023-06-09T18:36:19.866Z\",\"msg\":\"Received update event\",\"mod_name\":\"remote_development\",\"workspace_namespace\":\"gl-rd-ns-62029-5534214-k66cjy\",\"workspace_name\":\"workspace-62029-5534214-k66cjy\",\"agent_id\":62029}\n{\"level\":\"debug\",\"time\":\"2023-06-09T18:36:43.627Z\",\"msg\":\"Applied event\",\"mod_name\":\"remote_development\",\"apply_event\":\"WaitEvent{ GroupName: \\\"wait-0\\\", Status: \\\"Successful\\\", Identifier: \\\"gl-rd-ns-62029-5534214-k66cjy_workspace-62029-5534214-k66cjy_apps_Deployment\\\" }\",\"agent_id\":62029}\n```\n\nWait until the workspace is provisioned successfully, and click to open the HTTP URL, example format `https://60001-workspace-62029-5534214-2vxdxq.remote-dev.dev/?folder=%2Fprojects%2Fexample-python-http-simple`. The GitLab OAuth application will ask you for authorization.\n\n![GitLab OAuth provider app, example with the Developer Evangelism demo environment](https://about.gitlab.com/images/blogimages/infrastructure-cloud-development-environments/gitlab_remote_dev_workspaces_oauth_app.png){: .shadow}\n\nYou can select the Web IDE menu, open a new terminal (`cmd shift p` and search for `terminal create`). More shortcuts and Web IDE usage are documented [here](https://docs.gitlab.com/ee/user/project/web_ide/).\n\n![GitLab remote development workspaces, Python example, create terminal](https://about.gitlab.com/images/blogimages/infrastructure-cloud-development-environments/gitlab_remote_dev_workspaces_python_web_ide_create_terminal.png){: .shadow}\n\nUsing the Python example project, try to run the `hello.py` file with the Python interpreter after changing the terminal to `bash` to access auto-completion and shell history. 
Type `pyth`, press tab, type `hel`, press tab, enter.\n\n```shell\n$ bash\n\n$ python hello.py\n```\n\nThe command will fail because the Python requirements still need to be installed. Let us fix that by running the following command:\n\n```shell\n$ pip install -r requirements.txt\n```\n\n![GitLab remote development workspaces, Python example, install requirements in the terminal](https://about.gitlab.com/images/blogimages/infrastructure-cloud-development-environments/gitlab_remote_dev_workspaces_python_web_ide_terminal_install_pip.png){: .shadow}\n\n**Note**: This example is intentionally kept simple, and does not use best practices with `pyenv` for managing Python environments. We will explore development environment templates in future blog posts.\n\nRun the Python application `hello.py` again to start the web server on port 8080.\n\n```shell\n$ python hello.py\n```\n\nYou can access the exposed port by modifying the URL from the default port at the beginning of the URL to the exposed port `8080`. The `?folder` URL parameter can also be removed.\n\n```diff\n-https://60001-workspace-62029-5534214-kbtcmq.remote-dev.dev/?folder=/projects/example-python-http-simple\n+https://8080-workspace-62029-5534214-kbtcmq.remote-dev.dev/\n```\n\nThe URL is not publicly available and requires access through the GitLab OAuth session.\n\n![GitLab remote development workspaces, Python example, run webserver, access HTTP](https://about.gitlab.com/images/blogimages/infrastructure-cloud-development-environments/gitlab_remote_dev_workspaces_python_web_ide_terminal_run_webserver_access_http.png){: .shadow}\n\nModifying the workspace requires custom container images that support running with [arbitrary user IDs](https://docs.gitlab.com/ee/user/workspace/#arbitrary-user-ids). The example project uses a custom image which allows installing Python dependencies and creating build artifacts. It also allows using the bash terminal shown above. 
Learn more about custom image creation in the next section.\n\n### Custom workspace container images\nCustom container images require support for [arbitrary user IDs](https://docs.gitlab.com/ee/user/workspace/#arbitrary-user-ids). You can build custom container images with [GitLab CI/CD](/solutions/continuous-integration/) and use the [GitLab container registry](https://docs.gitlab.com/ee/user/packages/container_registry/) to distribute the container images on the DevSecOps platform.\n\nWorkspaces run with arbitrary user IDs in the Kubernetes cluster containers and manage resource access with Linux group permissions. Existing container images may need to be changed, and imported as base image for new container images. The [following example](https://gitlab.com/gitlab-de/use-cases/remote-development/container-images/python-remote-dev-workspaces-user-id/-/blob/main/Dockerfile) uses the `python:3.11-slim-bullseye` image from Docker Hub as a base container image in the `FROM` key. The next steps create and set a home directory in `/home/gitlab-workspaces`, and manage user and group access to specified directories. Additionally, you can install more convenience tools and configurations into the image, for example the `git` package.\n\n[`Dockerfile`](https://gitlab.com/gitlab-de/use-cases/remote-development/container-images/python-remote-dev-workspaces-user-id/-/blob/main/Dockerfile)\n```\n# Example demo for a Python-based container image.\n# NOTE: THIS IMAGE IS NOT ACTIVELY MAINTAINED. DEMO USE CASES ONLY, DO NOT USE IN PRODUCTION.\n\nFROM python:3.11-slim-bullseye\n\n# User id for build time. Runtime will be an arbitrary random ID.\nRUN useradd -l -u 33333 -G sudo -md /home/gitlab-workspaces -s /bin/bash -p gitlab-workspaces gitlab-workspaces\n\nENV HOME=/home/gitlab-workspaces\n\nWORKDIR $HOME\n\nRUN mkdir -p /home/gitlab-workspaces && chgrp -R 0 /home && chmod -R g=u /etc/passwd /etc/group /home\n\n# TODO: Add more convenience tools into the user home directory, i.e. 
enable color prompt for the terminal, install pyenv to manage Python environments, etc\nRUN apt update && \\\n    apt -y --no-install-recommends install git procps findutils htop vim curl wget && \\\n    rm -rf /var/lib/apt/lists/*\n\nUSER gitlab-workspaces\n```\n\n **As an exercise**, [fork the project](https://gitlab.com/gitlab-de/use-cases/remote-development/container-images/python-remote-dev-workspaces-user-id) and modify the package installation step in the `Dockerfile` file to install the `dnsutils` package on the Debian based image to get access to the `dig` command.\n\n[`Dockerfile`](https://gitlab.com/gitlab-de/use-cases/remote-development/container-images/python-remote-dev-workspaces-user-id/-/blob/main/Dockerfile)\n```diff\n-RUN apt update && \\\n-    apt -y --no-install-recommends install git procps findutils htop vim curl wget && \\\n-    rm -rf /var/lib/apt/lists/*\n+RUN apt update && \\\n+    apt -y --no-install-recommends install git procps findutils htop vim curl wget dnsutils && \\\n+    rm -rf /var/lib/apt/lists/*\n```\n\n[Build the container image](https://docs.gitlab.com/ee/ci/docker/using_docker_build.html) with your preferred CI/CD workflow. On GitLab.com SaaS, you can include the `Docker.gitlab-ci.yml` template which takes care of building the image.\n\n```yaml\ninclude:\n    - template: Docker.gitlab-ci.yml\n```\n\nWhen building the container images manually, use Linux and `amd64` as platform architecture [until multi-architecture support is available for running workspaces](https://gitlab.com/groups/gitlab-org/-/epics/10594). Also, review the [optimizing images guide in the documentation](https://docs.gitlab.com/ee/ci/pipelines/pipeline_efficiency.html#optimize-docker-images) when creating custom container images to optimize size and build times.\n\nNavigate into `Deploy > Container Registry` in the GitLab UI and copy the image URL from the tagged image. 
Open the `.devfile.yaml` file in the forked GitLab project `example-python-http-simple`, and change the `image` path to the newly built image URL.\n\n[`.devfile.yaml`](https://gitlab.com/gitlab-de/use-cases/remote-development/example-python-http-simple/-/blob/main/.devfile.yaml)\n```diff\n-      image: registry.gitlab.com/gitlab-de/use-cases/remote-development/container-images/python-remote-dev-workspaces-user-id:latest\n+      image: registry.gitlab.example.com/remote-dev-workspaces/python-remote-dev-workspaces-user-id:latest\n```\n\nNavigate into `Your Work > Workspaces` and create a new workspace for the project, and try to execute the `dig` command to query the IPv6 address of GitLab.com (or any other internal domain).\n\n```shell\n$ dig +short gitlab.com AAAA\n```\n\nThe custom container image project is located [here](https://gitlab.com/gitlab-de/use-cases/remote-development/container-images/python-remote-dev-workspaces-user-id/).\n\n## Tips\nThis blog post's setup steps with environment variables are easy to follow. 
For production usage, use automation to manage your environment with Terraform, Ansible, etc.\n\n- Terraform: [Provision a GKE Cluster (Google Cloud)](https://developer.hashicorp.com/terraform/tutorials/kubernetes/gke), [Provision an EKS Cluster (AWS)](https://developer.hashicorp.com/terraform/tutorials/kubernetes/eks), [Provision an AKS Cluster (Azure)](https://developer.hashicorp.com/terraform/tutorials/kubernetes/aks), [Deploy Applications with the Helm Provider](https://developer.hashicorp.com/terraform/tutorials/kubernetes/helm-provider)\n- Ansible: [google.cloud.gcp_container_cluster module](https://docs.ansible.com/ansible/latest/collections/google/cloud/gcp_container_cluster_module.html), [community.aws.eks_cluster module](https://docs.ansible.com/ansible/latest/collections/community/aws/eks_cluster_module.html), [azure.azcollection.azure_rm_aks module](https://docs.ansible.com/ansible/latest/collections/azure/azcollection/azure_rm_aks_module.html), [kubernetes.core collection](https://docs.ansible.com/ansible/latest/collections/kubernetes/core/index.html#plugin-index)\n\n### Certificate management\nThe workspaces domain requires a valid TLS certificate. The examples above used certbot with Let's Encrypt, requiring a certificate renewal after three months. Depending on your corporate requirements, you may need to create TLS certificates signed by the corporate CA identity and manage the certificates. Alternatively, you can look into solutions like [cert-manager for Kubernetes](https://cert-manager.io/docs/getting-started/) that will help renew certificates automatically.\n\nDo not forget to add TLS certificate validity monitoring to avoid unforeseen errors. 
The [blackbox exporter for Prometheus](https://github.com/prometheus/blackbox_exporter) can help with monitoring TLS certificate expiry and send alerts.\n\n### Troubleshooting\nHere are a few tips for troubleshooting connections and inspecting the cluster resources.\n\n#### Verify the connections\nTry to connect to the workspaces domain to see whether the Kubernetes Ingress controller responds to HTTP requests.\n\n```shell\n$ curl -vL ${GITLAB_WORKSPACES_PROXY_DOMAIN}\n```\n\nInspect the logs of the proxy deployment to follow connection requests. Since the proxy requires an authorization token sent via the OAuth app, an HTTP 400 error is expected for unauthenticated curl requests.\n\n```shell\n$ kubectl logs -f -l app.kubernetes.io/name=gitlab-workspaces-proxy -n gitlab-workspaces\n```\n\nCheck if the TLS certificate is valid. You can also use `sslscan` and other tools.\n\n```shell\n$ openssl s_client -connect ${GITLAB_WORKSPACES_PROXY_DOMAIN}:443\n\n$ sslscan ${GITLAB_WORKSPACES_PROXY_DOMAIN}\n```\n\n[Debug the agent for Kubernetes](https://docs.gitlab.com/ee/user/clusters/agent/work_with_agent.html#debug-the-agent) and inspect the pod logs.\n\n```shell\n$ kubectl get ns\n\n$ kubectl logs -f -l app.kubernetes.io/name=gitlab-agent -n gitlab-agent-\u003CNAMESPACENAME>\n```\n\n#### Workspaces cannot be created even if the agent is connected\nWhen the workspaces deployment is spinning and nothing happens, try restarting the workspaces proxy and agent for Kubernetes. This is a known problem and tracked [in this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/414399#note_1426652421).\n\n```shell\n$ kubectl rollout restart deployment -n gitlab-workspaces\n\n$ kubectl rollout restart deployment -n gitlab-agent-$GL_AGENT_K8S\n```\n\nIf the agent for Kubernetes remains unresponsive, consider a complete reinstall. 
First, navigate into the GitLab UI into `Operate > Kubernetes Clusters` and [delete the agent](https://docs.gitlab.com/ee/user/clusters/agent/work_with_agent.html#remove-an-agent-through-the-gitlab-ui). Next, use the following commands to delete the Helm release from the cluster, and run the installation command generated from the UI again.\n\n```shell\nkubectl get ns\nhelm list -A\n\nexport RELEASENAME=xxx\nexport NAMESPACENAME=xxx\nexport TOKEN=XXXXXXXXXXREPLACEME\nhelm uninstall ${RELEASENAME} -n gitlab-agent-${NAMESPACENAME}\n\nhelm repo add gitlab https://charts.gitlab.io\nhelm repo update\n\nhelm upgrade --install ${RELEASENAME} gitlab/gitlab-agent \\\n    --namespace gitlab-agent-${NAMESPACENAME} \\\n    --create-namespace \\\n    --set image.tag=v16.1.2 \\\n    --set config.token=${TOKEN} \\\n    --set config.kasAddress=wss://kas.gitlab.com # Replace with your self-managed GitLab KAS instance URL if not using GitLab.com SaaS\n```\n\nExample: `helm uninstall remote-dev-dev -n gitlab-agent-remote-dev-dev`\n\n#### Cannot modify workspace using custom images\nIf you cannot modify the workspace, open a new terminal and check the user id and their groups.\n\n```shell\n$ id\n```\n\nInspect the `.devfile.yaml` file in the project and extract the `image` attribute to test the used container image. You can use a container CLI, for example `docker`, that runs the container with a different user ID. Note: You can use any user ID to test the behavior.\n\nTip: Use grep and cut commands to extract the image attribute URL from the `.devfile.yaml`.\n\n```shell\n$ cat .devfile.yaml | grep image: | cut -f2 -d ':'\n```\n\nRun the following command to execute the `id` command in the container, and print the user information.\n\n```\n$ docker run -u 1234 -ti registry.gitlab.com/path/to/project/image:tagname id\n```\n\nTry to modify the workspace by running the command `echo 'Hi' >> ~/example.md`. 
This can fail with a permission error.\n\n```shell\n$ docker run -u 1234 -ti registry.gitlab.com/path/to/project/image:tagname echo 'Hi' >> ~/example.md\n```\n\nIf the above command failed, the Linux user group does not have enough permissions to modify the file. You can view the permissions using the `ls` command.\n\n```shell\n$ docker run -u 1234 -ti registry.gitlab.com/path/to/project/image:tagname ls -lart ~/\n```\n\n### Contribute\nThe [remote development developer documentation](https://gitlab.com/gitlab-org/remote-development/gitlab-remote-development-docs) provides insights into the [architecture blueprint](https://docs.gitlab.com/ee/architecture/blueprints/remote_development/) and how to set up a local development environment to [start contributing](/community/contribute/). In the future, we will be able to use remote development workspaces to develop remote development workspaces.\n\n## Share your feedback\nIn this blog post, you have learned how to manage the infrastructure for remote development workspaces, create your first workspace, and more tips on custom workspace images and troubleshooting. Using the same development environment across organizations and communities, developers can focus on writing code and get fast preview feedback (i.e., by running a web server that can be accessed externally in the remote workspace). Providing the same reproducible environment also helps open source contributors to reproduce bugs and provide feedback most efficiently. They can use the same best practices as upstream maintainers.\n\nDevelopers and DevOps engineers will be using the Web IDE in workspaces. 
Later, being able to [connect their desktop client to workspaces](https://gitlab.com/groups/gitlab-org/-/epics/10478), they can take advantage of even more efficiency with the [most comprehensive AI-powered DevSecOps platform](/gitlab-duo/): Code suggestions and more AI-powered workflows are just one fingertip away.\n\nWhat will your teams build with remote development workspaces? Please share your experiences in the [feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/410031), blog about your setup, and join our [community forum](https://forum.gitlab.com/) for more discussions.\n\nCover image by [Nick Karvounis](https://unsplash.com/@nickkarvounis) on [Unsplash](https://unsplash.com/photos/SmIM3m8f3Pw)",[9,2533,2281,888,3237],{"slug":3622,"featured":6,"template":698},"set-up-infrastructure-for-cloud-development-environments","content:en-us:blog:set-up-infrastructure-for-cloud-development-environments.yml","Set Up Infrastructure For Cloud Development Environments","en-us/blog/set-up-infrastructure-for-cloud-development-environments.yml","en-us/blog/set-up-infrastructure-for-cloud-development-environments",{"_path":3628,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3629,"content":3635,"config":3641,"_id":3643,"_type":14,"title":3644,"_source":16,"_file":3645,"_stem":3646,"_extension":19},"/en-us/blog/solving-complex-challenges-with-gitlab-duo-workflow",{"title":3630,"description":3631,"ogTitle":3630,"ogDescription":3631,"noIndex":6,"ogImage":3632,"ogUrl":3633,"ogSiteName":685,"ogType":686,"canonicalUrls":3633,"schema":3634},"Solving complex challenges with GitLab Duo Workflow","Learn how a member of the GitLab Customer Success Management team uses agentic AI for real-world problem-solving, including addressing Helm chart limits in the package 
registry.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097663/Blog/Hero%20Images/Blog/Hero%20Images/Workflow%201800x945_2gQoQIbY9NvjLFpXtsxtXy_1750097663612.png","https://about.gitlab.com/blog/solving-complex-challenges-with-gitlab-duo-workflow","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Solving complex challenges with GitLab Duo Workflow\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Johannes Bauer\"}],\n        \"datePublished\": \"2025-04-23\",\n      }",{"title":3630,"description":3631,"authors":3636,"heroImage":3632,"date":3638,"body":3639,"category":801,"tags":3640},[3637],"Johannes Bauer","2025-04-23","As a Customer Success Manager (CSM), I often face complex challenges that require quick, efficient, and innovative solutions. Recently, one of my enterprise customers encountered an issue with Helm chart limits in the GitLab package registry. At GitLab, we are committed to dogfooding our DevSecOps platform, so I turned to [GitLab Duo Workflow](https://about.gitlab.com/gitlab-duo/agent-platform/), our secure, agentic AI offering that is currently in private beta.\n\n[Agentic AI](https://about.gitlab.com/topics/agentic-ai/) represents the next evolution of generative AI, designed to go beyond single-task automation, such as code completion or test generation. While generative AI focuses on creating content based on specific prompts, agentic AI introduces a level of autonomy and context awareness, enabling it to perform complex, multi-step workflows. This advanced AI operates as a capable assistant that understands the broader goals of a task, plans intermediate steps, and executes them efficiently.\n\nAgentic AI, more specifically Duo Workflow, has transformed how I approach problem-solving as a CSM. 
Here's how I used Duo Workflow to resolve a real-world challenge and deliver value to my customer.\n\n## Addressing Helm chart limits in the GitLab package registry\n\nThe customer was dealing with a hardcoded limit for Helm charts in the GitLab package registry, which wasn’t flexible enough for their needs. They required a solution that allowed administrators to configure limits dynamically through the Admin UI. Addressing this issue was critical to maintaining their workflow efficiency and satisfaction.\n\n### How I leveraged GitLab Duo Workflow\n\nTo craft an effective solution, I leveraged Duo Workflow, a game-changer for brainstorming and strategizing with agentic AI.\n\nMy prompt was:\n\n> I am working on the following issue [insert issue number] and would like to resolve it. We have developed the following strategy:\n>\n> 1. Implement the feature as an application setting in the Admin UI, rather than using a configuration file approach.\n> 2. Utilize the existing package_registry jsonb column in the application settings table. Add a new key to this JSON structure to store the Helm chart limit.\n> 3. Create a new section in the Admin UI (/admin/application_settings/ci_cd#js-package-settings) for “specific limits for each format”. This will require some design work to determine how to best display the setting.\n> 4. Add a number field in this new section to allow administrators to set the Helm chart limit.\n> 5. Update the backend logic in the Helm package registry (app/finders/packages/helm/packages_finder.rb) to use this new application setting instead of the hardcoded limit.\n>\n>Please organize your updates into Frontend and Backend changes. Additionally, could you check if there are any specific requirements when adding a new column to the application settings table?\n\nThis prompt laid out a clear plan, enabling Duo Workflow to provide structured recommendations and actionable insights. The strategy developed was straightforward yet impactful. 
The changes were structured into frontend and backend tasks, ensuring clear separation and collaboration.\n\n![Result of GitLab Duo Workflow prompt](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097675/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097674930.png)\n\n![Result of GitLab Duo Workflow prompt - 2](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097675/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097674931.png)\n\n## Results\n\nAfter implementing the solution together with Duo Workflow, I submitted a merge request, which was successfully merged and became available in GitLab 17.10!\n\nThe result? A flexible, user-friendly way for administrators to configure Helm chart limits, significantly enhancing the customer’s experience and aligning with their operational goals.\n\n![GitLab package registry page](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097675/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097674933.png)\n\nThe customer is thrilled to have this functionality available as it provides them with the flexibility and control they need to streamline their workflows. What’s even more exciting is that it only took me **hours** to complete the implementation, compared to the **days** I initially estimated, thanks to Duo Workflow. As someone without prior development experience in GitLab, this was a huge win!\n\nThis experience taught me the immense potential of AI tools like [GitLab Duo](https://about.gitlab.com/gitlab-duo/). By combining human expertise with AI-driven insights, we can address even the most complex challenges effectively. As a CSM, leveraging such tools streamlines problem-solving and strengthens trust and partnership with customers.\n\n## Delivering value\n\nGitLab Duo Workflow empowered me to tackle a complex technical issue head-on and find a solution that exceeded customer expectations. 
If you'd like to try Duo Workflow in your development environment, please [sign up for our private beta waitlist](https://about.gitlab.com/gitlab-duo/agent-platform/).\n\n## Learn more\n- [GitLab Duo Workflow: Enterprise visibility and control for agentic AI](https://about.gitlab.com/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai/)\n- [Use GitLab Duo Workflow to improve application quality assurance](https://about.gitlab.com/blog/use-gitlab-duo-workflow-to-improve-application-quality-assurance/)\n- [GitLab Duo Workflow documentation](https://docs.gitlab.com/user/duo_workflow/)\n- [What is agentic AI?](https://about.gitlab.com/topics/agentic-ai/)\n- [GitLab Duo](https://about.gitlab.com/gitlab-duo/)",[803,9,717],{"slug":3642,"featured":91,"template":698},"solving-complex-challenges-with-gitlab-duo-workflow","content:en-us:blog:solving-complex-challenges-with-gitlab-duo-workflow.yml","Solving Complex Challenges With Gitlab Duo Workflow","en-us/blog/solving-complex-challenges-with-gitlab-duo-workflow.yml","en-us/blog/solving-complex-challenges-with-gitlab-duo-workflow",{"_path":3648,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3649,"content":3654,"config":3658,"_id":3660,"_type":14,"title":3661,"_source":16,"_file":3662,"_stem":3663,"_extension":19},"/en-us/blog/speed-up-code-reviews-let-ai-handle-the-feedback-implementation",{"title":3650,"description":3651,"ogTitle":3650,"ogDescription":3651,"noIndex":6,"ogImage":1454,"ogUrl":3652,"ogSiteName":685,"ogType":686,"canonicalUrls":3652,"schema":3653},"Speed up code reviews: Let AI handle the feedback implementation","Discover how GitLab Duo with Amazon Q automates the implementation of code review feedback through AI, transforming a time-consuming manual process into a streamlined workflow.","https://about.gitlab.com/blog/speed-up-code-reviews-let-ai-handle-the-feedback-implementation","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": 
\"Article\",\n        \"headline\": \"Speed up code reviews: Let AI handle the feedback implementation\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Cesar Saavedra\"}],\n        \"datePublished\": \"2025-06-10\",\n      }",{"title":3650,"description":3651,"authors":3655,"heroImage":1454,"date":845,"body":3656,"category":801,"tags":3657},[798],"You know that feeling when you've just submitted a merge request and the code review comments start rolling in? One reviewer wants the labels updated, another asks for side-by-side layouts, someone else requests bold formatting, and don't forget about that button color change. Before you know it, you're spending hours implementing feedback that, while important, takes you away from building new features. It's a time-consuming process that every developer faces, yet it feels like there should be a better way.\n\nWhat if you could have an AI assistant that understands code review feedback and automatically implements the changes for you? That's exactly what [GitLab Duo with Amazon Q](https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws/) brings to your development workflow. This seamless integration combines GitLab's comprehensive DevSecOps platform with Amazon Q's advanced AI capabilities, creating an intelligent assistant that can read reviewer comments and converts them directly into code changes. Instead of manually addressing each piece of feedback, you can let AI handle the implementation while you focus on the bigger picture.\n\n## How GitLab Duo with Amazon Q works\n\nWhen you're viewing a merge request with reviewer comments, you'll see feedback scattered throughout your code. Let's take the examples from earlier in this article: maybe you've received a request to update a form label here, a suggestion to display fields side-by-side there, or a note about making certain text bold. 
Each comment represents a task that normally you'd need to handle manually.\n\n![feedback on an MR](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749673634/Blog/Content%20Images/1-show-comment.png)\n\nWith GitLab Duo with Amazon Q, you can simply enter the `/q dev` quick action in a comment. This prompts Amazon Q to analyze all the feedback and start modifying your code automatically. The AI agent understands the context of each comment and implements the requested changes directly in your codebase.\n\n![/q dev function prompting Amazon Q to analyze feedback](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749673634/Blog/Content%20Images/2-invoke-q-dev.png)\n\nOnce Amazon Q processes the feedback, you can view all the updates in the \"Changes\" tab of your merge request. Every modification is clearly visible, so you can verify that the AI agent correctly interpreted and implemented each piece of feedback. You can then run your updated application to confirm that all the changes work as expected — that form label is updated, the fields are displayed side-by-side, the text is bold, and yes, that button is now blue.\n\nWatch the code review feedback process in action:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/31E9X9BrK5s?si=ThFywR34V3Bfj1Z-\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\nProcessing code review feedback is a necessary but time-intensive part of software development.  GitLab Duo with Amazon Q evolves this manual process into an automated workflow, dramatically reducing the time between receiving feedback and implementing changes. 
By letting AI handle these routine modifications, you're free to focus on what really matters — building innovative features and solving complex problems.\n\nWith GitLab Duo with Amazon Q, you can:\n- Eliminate hours of manual feedback implementation\n- Accelerate your code review cycles\n- Maintain consistency in how feedback is addressed\n- Reduce context switching between reviewing comments and writing code\n- Ship features faster with streamlined deployment times\n\n> #### To learn more about GitLab Duo with Amazon Q visit us at an upcoming [AWS Summit in a city near you](https://about.gitlab.com/events/aws-summits/) or [reach out to your GitLab representative](https://about.gitlab.com/partners/technology-partners/aws/#form).\n\n## GitLab Duo with Amazon Q resources\n\n- [GitLab Duo with Amazon Q: Agentic AI optimized for AWS generally available](https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-agentic-ai-optimized-for-aws/)\n- [GitLab and AWS partner page](https://about.gitlab.com/partners/technology-partners/aws/)\n- [GitLab Duo with Amazon Q documentation](https://docs.gitlab.com/user/duo_amazon_q/)\n- [What is agentic AI?](https://about.gitlab.com/topics/agentic-ai/)\n- [Agentic AI guides and resources](https://about.gitlab.com/blog/agentic-ai-guides-and-resources/)",[717,695,9,805,804,719],{"slug":3659,"featured":91,"template":698},"speed-up-code-reviews-let-ai-handle-the-feedback-implementation","content:en-us:blog:speed-up-code-reviews-let-ai-handle-the-feedback-implementation.yml","Speed Up Code Reviews Let Ai Handle The Feedback 
Implementation","en-us/blog/speed-up-code-reviews-let-ai-handle-the-feedback-implementation.yml","en-us/blog/speed-up-code-reviews-let-ai-handle-the-feedback-implementation",{"_path":3665,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3666,"content":3671,"config":3676,"_id":3678,"_type":14,"title":3679,"_source":16,"_file":3680,"_stem":3681,"_extension":19},"/en-us/blog/streamline-devsecops-engineering-workflows-with-gitlab-duo",{"title":3667,"description":3668,"ogTitle":3667,"ogDescription":3668,"noIndex":6,"ogImage":817,"ogUrl":3669,"ogSiteName":685,"ogType":686,"canonicalUrls":3669,"schema":3670},"Streamline DevSecOps engineering workflows with GitLab Duo","Learn all the ways GitLab Duo's AI capabilities can improve the efficiency of development workflows. Includes in-depth tutorials and demos.","https://about.gitlab.com/blog/streamline-devsecops-engineering-workflows-with-gitlab-duo","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Streamline DevSecOps engineering workflows with GitLab Duo\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Abubakar Siddiq Ango\"}],\n        \"datePublished\": \"2024-12-05\",\n      }",{"title":3667,"description":3668,"authors":3672,"heroImage":817,"date":3673,"body":3674,"category":801,"tags":3675},[1538],"2024-12-05","It's 9 a.m. somewhere, and a DevOps engineer is starting their day. They check their [GitLab todo list](https://docs.gitlab.com/ee/user/todos.html) to see any mentions or tasks assigned to them, collaborating with other stakeholders in their organization. 
These tasks can include:\n\n- managing infrastructure\n- maintaining the configuration of resources\n- maintaining CI/CD pipelines\n- automating processes for efficiency\n- maintaining monitoring and alerting systems\n- ensuring applications are securely built and deployed\n- modernizing applications with containerization\n\nTo carry out these tasks, DevOps engineers spend a lot of time reading documentation, writing configuration files, and searching for help in forums, issues boards, and blogs. Time is spent studying and understanding concepts, and how tools and technologies work. When they don't work as expected, a lot more time is spent investigating why. New tools are released regularly to solve niche or existing problems differently, which introduces more things to learn and maintain context for.\n\n[GitLab Duo](https://about.gitlab.com/gitlab-duo/), our AI-powered suite of capabilities, fits into the workflow of DevSecOps engineers, enabling them to reduce time spent solving problems while increasing their efficiency.\n\nLet's explore how GitLab Duo helps streamline workflows.\n\n## Collaboration and communication\n\nDiscussions or requests for code reviews require spending time reading comments from everyone and carefully reviewing the work shared. GitLab Duo capabilities like Discussion Summary, Code Review Summary, and Merge Request Summary increase the effectiveness of collaboration by reducing the time required to get caught up on activities and comments, with more time spent getting the actual work done.\n\n### Merge Request Summary  \n\nWriting a detailed and clear summary of the change a merge request introduces is crucial for every stakeholder to understand what, why, and how a change was made. It's more difficult than it sounds to effectively articulate every change made, especially in a large merge request. 
[Merge Request Summary](https://docs.gitlab.com/ee/user/project/merge_requests/duo_in_merge_requests.html#generate-a-description-by-summarizing-code-changes) analyzes the change's diff and provides a detailed summary of the changes made.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/4muvSFuWWL4?si=1i2pkyqXZGn2dSbd\" title=\"GitLab Duo Chat is now aware of Merge Requests\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n### Discussion Summary\n\nImagine getting pulled into an issue with more than 100 comments and a lengthy description, with different perspectives and opinions shared. GitLab Duo [Discussion Summary](https://docs.gitlab.com/ee/user/discussions/index.html#summarize-issue-discussions-with-duo-chat) summarizes all the conversations in the issue and identifies tasks that need to be done, reducing time spent. \n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/IcdxLfTIUgc?si=WXlINow3pLoKHBVM\" title=\"GitLab Duo Dicussion Summary\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n#### Code Review Summary\n\nA merge request has been assigned to a DevOps engineer for review in preparation for deployment, and they have spent time reviewing several parts of the change with multiple comments and suggestions. When [submitting a review](https://docs.gitlab.com/ee/user/project/merge_requests/reviews/index.html#submit-a-review), a text box is presented to summarize the review, which often requires taking a pause and articulating the review. 
With [Code Review Summary](https://docs.gitlab.com/ee/user/project/merge_requests/reviews/index.html#submit-a-review), they get a concise summary automatically drafted leading to efficiency.\n\n## Manage infrastructure changes\n\nPart of a DevOps engineer's workflow is managing infrastructure changes. Infrastructure as code ([IaC](https://docs.gitlab.com/ee/user/infrastructure/iac/)) revolutionized this process, allowing for documentation, consistency, faster recovery, accountability, and collaboration. A challenge with IaC is understanding the requirements and syntax of the chosen tool and provider where the infrastructure will be created. A lot of time is then spent reviewing documentation and tweaking configuration files until they meet expectations. \n\nWith GitLab Duo [Code Explanation](https://docs.gitlab.com/ee/user/gitlab_duo/index.html#code-explanation) and [Code Suggestions](https://docs.gitlab.com/ee/user/project/repository/code_suggestions/index.html), you can prompt GitLab Duo to create configuration files in your tool of choice and learn about the syntax of those tools. With Code Suggestions, you can either leverage [code generation](https://docs.gitlab.com/ee/user/project/repository/code_suggestions/index.html#code-generation), where you prompt GitLab Duo to generate the configuration, or code completion, which provides suggestions as you type while maintaining the context of your existing configurations.\n\nAs of the time this article was published, Terraform is [supported by default](https://docs.gitlab.com/ee/user/project/repository/code_suggestions/supported_extensions.html#supported-languages) with the right extensions for your IDEs. 
Other technologies can be supported with [additional language support configuration](https://docs.gitlab.com/ee/user/project/repository/code_suggestions/supported_extensions.html#add-support-for-more-languages) for the [GitLab Workflow extension](https://docs.gitlab.com/ee/editor_extensions/visual_studio_code/index.html).\n\nWhere a technology is not officially supported, [GitLab Duo Chat](https://docs.gitlab.com/ee/user/gitlab_duo_chat/examples.html) is the powerful AI assistant that can help generate, explain, clarify, and troubleshoot your configuration, while maintaining context from selected text or opened files. Here are two demos where GitLab Duo helped create IaC with Terraform and AWS CloudFormation.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/saa2JJ57UaQ?si=Bu9jyQWwuSUcw8vr\" title=\"Manage your Infrastructure with Terraform and AI using GitLab Duo\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n\u003Cbr>\u003C/br>\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/KSLk2twXqiI?si=QDdERjbM0f7X2p23\" title=\"Deploying AWS Lambda function using AWS Cloudformation with help from GitLab Duo\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n### Configuration management\n\nOnce your infrastructure is up, GitLab Duo Chat can also help create configuration files and refactor existing ones. These can be Ansible configurations for infrastructure or cloud-native configurations using Docker, Kubernetes, or Helm resource files. 
In the videos below, I demonstrate how GitLab Duo helps with Ansible, containerization, and application deployment to Kubernetes.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/t6ZCq_jkBwY?si=awCUdu1wCgOO21XR\" title=\"Configuring your Infrastructure with Ansible & GitLab Duo\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n\u003Cbr>\u003C/br>\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/KSLk2twXqiI?si=QDdERjbM0f7X2p23\" title=\"Containerizing your application with GitLab Duo\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n\u003Cbr>\u003C/br>\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/uroSxvMFqPU?si=GMNC7f2b7i_cjn6F\" title=\"Deploying your application to Kubernetes with Help from GitLab Duo\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n\u003Cbr>\u003C/br>\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/9yGDM00RlUA?si=kE5JZD_OEFcxeR7E\" title=\"Deploying to Kubernetes using Helm with help from GitLab Duo\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n### Test, test, test\n\nWriting tests is an important part of building secure software, but it can be a chore and often becomes an afterthought. 
You can leverage the power of GitLab Duo to [generate tests for your code](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/index.html#vulnerability-resolution) by highlighting your code and typing the `/tests` in the Chat panel of your IDE.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/zWhwuixUkYU?si=wI93j90PIiUMyGcV\" title=\"GitLab Duo Test Generation\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n### CI/CD pipeline troubleshooting\n\nAutomation is an essential part of the DevOps engineer's workflow, and Continuous Integration/Deployment ([CI/CD](https://about.gitlab.com/topics/ci-cd/)) is central to this. You can trigger CI jobs on code push, merge, or on schedule. But, when jobs fail, you spend a lot of time reading through the logs to identify why, and for cryptic errors, it can take more time to figure out. [GitLab Duo Root Cause Analysis](https://about.gitlab.com/blog/developing-gitlab-duo-blending-ai-and-root-cause-analysis-to-fix-ci-cd/) analyzes your failed job log and errors, and then recommends possible fixes. This reduces the time spent investigating the errors and finding a fix.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/Sa0UBpMqXgs?si=IyR-skz9wJMBSicE\" title=\"GitLab Duo Root Cause Analysis\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n### Building secure applications\n\nPart of software development includes discovering vulnerabilities, either in the application or its dependencies. Some vulnerabilities are easy to fix, while others require creating a milestone with planning. 
GitLab Duo [Vulnerability Explanation](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/index.html#explaining-a-vulnerability) and [Vulnerability Resolution](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/index.html#vulnerability-resolution) reduce the time spent researching and fixing vulnerabilities. Vulnerability Explanation explains why a vulnerability is happening, its impact, and how to fix it, helping the DevOps engineer to upskill. Vulnerability Resolution takes it further – instead of just suggesting a fix, it creates a merge request with a fix for the vulnerability for you to review. \n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/MMVFvGrmMzw?si=Fxc4SeOkCBKwUk_k\" title=\"GitLab Duo Vulnerability Explanation\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n\u003Cbr>\u003C/br>\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/VJmsw_C125E?si=XT3Qz5SsX-ISfCyq\" title=\"GitLab Duo Vulnerability resolution\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## More work done with less stress\n\nWith GitLab Duo, DevOps engineers can do more work deploying and maintaining secure applications, while acquiring more skills with the detailed responses from GitLab Duo Chat.\n\n> [Sign up for a free trial of GitLab Duo](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/) to get started today!",[803,109,951,9,719,717],{"slug":3677,"featured":91,"template":698},"streamline-devsecops-engineering-workflows-with-gitlab-duo","content:en-us:blog:streamline-devsecops-engineering-workflows-with-gitlab-duo.yml","Streamline Devsecops Engineering Workflows With Gitlab 
Duo","en-us/blog/streamline-devsecops-engineering-workflows-with-gitlab-duo.yml","en-us/blog/streamline-devsecops-engineering-workflows-with-gitlab-duo",{"_path":3683,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3684,"content":3690,"config":3694,"_id":3696,"_type":14,"title":3697,"_source":16,"_file":3698,"_stem":3699,"_extension":19},"/en-us/blog/streamline-migrations-with-user-contribution-and-membership-mapping",{"title":3685,"description":3686,"ogTitle":3685,"ogDescription":3686,"noIndex":6,"ogImage":3687,"ogUrl":3688,"ogSiteName":685,"ogType":686,"canonicalUrls":3688,"schema":3689},"Streamline migrations with user contribution and membership mapping","New GitLab feature enhances project imports, allowing post-import user contribution mapping and greater flexibility and control.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749663670/Blog/Hero%20Images/blog-image-template-1800x945__13_.png","https://about.gitlab.com/blog/streamline-migrations-with-user-contribution-and-membership-mapping","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Streamline migrations with user contribution and membership mapping\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Magdalena Frankiewicz\"}],\n        \"datePublished\": \"2024-11-25\",\n      }",{"title":3685,"description":3686,"authors":3691,"heroImage":3687,"date":2925,"body":3692,"category":717,"tags":3693},[1639],"We are excited to announce a new feature that enhances the group and project import process: improved [user contribution and membership mapping](https://docs.gitlab.com/ee/user/project/import/#user-contribution-and-membership-mapping). This feature represents a significant improvement in the project import process, offering greater flexibility and control for both users managing the import process and users receiving contribution reassignments. 
The feature is available in [GitLab migration by direct transfer](https://docs.gitlab.com/ee/user/group/import/), [GitHub importer](https://docs.gitlab.com/ee/user/project/import/github.html), [Bitbucket Server importer](https://docs.gitlab.com/ee/user/project/import/bitbucket_server.html), and [Gitea importer](https://docs.gitlab.com/ee/user/project/import/gitea.html).\n\n## What's changing?\n\n1. Post-import mapping: Previously unavailable, this feature allows you to assign imported contributions and memberships to users on the destination instance after the import has been completed. Imported memberships and contributions are first mapped to placeholder users. Until they are reassigned, contributions display as associated with placeholders.  \n2. Email-independent mapping: The new process doesn't rely on email addresses, allowing you to map contributions for users who may have different email addresses on source and destination instances.  \n3. User control: Each user on the destination instance who is assigned a contribution mapping has to [accept the assignment](https://docs.gitlab.com/ee/user/project/import/#accept-contribution-reassignment) before any imported contributions are attributed to them. They can also [reject the assignment](https://docs.gitlab.com/ee/user/project/import/#reject-contribution-reassignment).\n\n## Key points for your migration\n\nAs you prepare to migrate your resources, here are some important aspects to familiarize yourself with:\n\n1. Placeholder users: Take some time to understand the concept of [placeholder users](https://docs.gitlab.com/ee/user/project/import/#placeholder-users) in GitLab. Consider how the [placeholder limits](https://docs.gitlab.com/ee/user/project/import/#placeholder-user-limits) apply to your specific use case.\n2. [Reassignment process](https://docs.gitlab.com/ee/user/project/import/#reassign-contributions-and-memberships): Explore the reassignment interface in the UI. 
It's designed with security in mind, so be sure to [review these security considerations](https://docs.gitlab.com/ee/user/project/import/#security-considerations).  \n3. User involvement: Inform your team about the migration process, particularly how they can [accept contribution reassignment](https://docs.gitlab.com/ee/user/project/import/#accept-contribution-reassignment). This helps provide for a smooth transition for everyone involved.\n\nBy understanding these aspects, you'll be well prepared for a successful migration.\n\n## What’s next\n\nWe are committed to enhancing this feature further. Key upcoming currently planned improvements include:\n\n1. CSV-based contribution reassignment:  \n   * Enable group owners to [reassign contributions via CSV file upload](https://gitlab.com/gitlab-org/gitlab/-/issues/455901)  \n   * Particularly beneficial for large-scale customers managing numerous users  \n2. Enhanced UI [visibility of the placeholder limits and counts](https://gitlab.com/gitlab-org/gitlab/-/issues/486691)\n\nFor a full list of further anticipated improvements, see the [User Contribution Mapping epic](https://gitlab.com/groups/gitlab-org/-/epics/14774).\n\n## Availability\n\n* This feature is available by default for direct transfer migrations on GitLab.com, GitLab Self-managed, and GitLab Dedicated instances from GitLab Version 17.7.\n* This feature is available by default in GitHub, Bitbucket Server, and Gitea importers on GitLab.com from GitLab 17.7 and on GitLab Self-Managed and GitLab Dedicated instances from GitLab Version 17.8.\n\n## Feedback and support\n\nWe value your feedback! 
If you encounter any issues or have suggestions regarding this change, please add a comment in the [feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/502565).",[9,695,717],{"slug":3695,"featured":6,"template":698},"streamline-migrations-with-user-contribution-and-membership-mapping","content:en-us:blog:streamline-migrations-with-user-contribution-and-membership-mapping.yml","Streamline Migrations With User Contribution And Membership Mapping","en-us/blog/streamline-migrations-with-user-contribution-and-membership-mapping.yml","en-us/blog/streamline-migrations-with-user-contribution-and-membership-mapping",{"_path":3701,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3702,"content":3708,"config":3712,"_id":3714,"_type":14,"title":3715,"_source":16,"_file":3716,"_stem":3717,"_extension":19},"/en-us/blog/streamline-the-path-to-cmmc-level-2-compliance-with-gitlab",{"title":3703,"description":3704,"ogTitle":3703,"ogDescription":3704,"noIndex":6,"ogImage":3705,"ogUrl":3706,"ogSiteName":685,"ogType":686,"canonicalUrls":3706,"schema":3707},"Streamline the path to CMMC Level 2 compliance with GitLab","Learn how GitLab’s comprehensive, AI-powered DevSecOps platform can help organizations meet Cybersecurity Maturity Model Certification Level 2 compliance requirements.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098208/Blog/Hero%20Images/Blog/Hero%20Images/AdobeStock_479904468%20%281%29_4lmOEVlaXP0YC3hSFmOw6i_1750098208185.jpg","https://about.gitlab.com/blog/streamline-the-path-to-cmmc-level-2-compliance-with-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Streamline the path to CMMC Level 2 compliance with GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Joseph Longo\"}],\n        \"datePublished\": \"2025-01-07\",\n      
}",{"title":3703,"description":3704,"authors":3709,"heroImage":3705,"date":3430,"body":3710,"category":693,"tags":3711},[2412],"The [Cybersecurity Maturity Model Certification (CMMC)](https://dodcio.defense.gov/cmmc/About/) Program is a framework developed by the U.S. Department of Defense (DoD) to enforce cybersecurity requirements and protect sensitive unclassified information shared by the DoD with contractors and subcontractors.\n\nWith the release of the CMMC [final rule](https://www.federalregister.gov/documents/2024/10/15/2024-22905/cybersecurity-maturity-model-certification-cmmc-program), DoD contractors can begin to assess and align their controls and processes to be compliant with CMMC’s requirements.\n\nThis article explains how GitLab customers can leverage the GitLab platform to help satisfy relevant NIST SP 800-171 R2 requirements to achieve CMMC Level 2 compliance.\n\n### Access Control\n\n#### 3.1.1, 3.1.2, 3.1.4 - 3.1.8, 3.1.11 - 3.1.13, 3.1.15\n\nGitLab’s access management features broadly support CMMC access control requirements.\n\nGitLab’s [role-based access control (RBAC) model](https://docs.gitlab.com/ee/user/permissions.html) enables customers to limit access to authorized users, implement separation of duties, and ensure such users are only granted the permissions they require to perform their responsibilities.\n\nGitLab also supports [custom roles](https://docs.gitlab.com/ee/user/custom_roles.html) enabling organizations to craft roles that more accurately meet their needs.\n\nGitLab’s [audit events](https://docs.gitlab.com/ee/user/compliance/audit_events.html) capture different actions within GitLab, including administrative actions. 
With RBAC and audit events, organizations can prevent non-privileged users from performing administrative actions and log such actions when they do occur.\n\nTo address the National Institute of Standards and Technology (NIST) requirement for limiting unsuccessful logon attempts, GitLab addresses this in [a few different ways](https://docs.gitlab.com/ee/security/unlock_user.html) depending on the particular service offering a customer is subscribed to.\n\nBy default, GitLab implements limits on how long user sessions can remain valid without activity. Self-managed customers can configure this [setting](https://docs.gitlab.com/ee/administration/settings/account_and_limit_settings.html#customize-the-default-session-duration) to meet their organizational needs.\n\nGitLab secures data in transit through [encryption](https://docs.gitlab.com/ee/security/tls_support.html) and offers options for organizations to limit how their users connect to their GitLab namespace or instance.\nOrganizations can restrict access to their top level group by [IP address](https://docs.gitlab.com/ee/user/group/access_and_permissions.html), and GitLab Dedicated customers can take a step further by using [AWS PrivateLink](https://docs.gitlab.com/ee/administration/dedicated/#aws-privatelink-connection-optional) as a connection gateway.\n\n### Audit and Accountability\n\n#### 3.3.1, 3.3.2, 3.3.8, 3.3.9\n\nAs mentioned, GitLab [audit events](https://docs.gitlab.com/ee/user/compliance/audit_events.html) capture different actions within GitLab, including administrative actions. Audit events in GitLab are associated with an individual user responsible for the event, and the audit events themselves are immutable.\n\nFor organizations with a GitLab Ultimate license, [audit event streaming](https://docs.gitlab.com/ee/user/compliance/audit_event_streaming.html) enables them to set a streaming destination for their top-level group’s audit events. 
GitLab Self-managed (Ultimate) and GitLab Dedicated customers can utilize the same functionality for streaming their GitLab [instance audit events](https://docs.gitlab.com/ee/administration/audit_event_streaming/index.html) as well.\n\n### Configuration Management\n\n#### 3.4.1 - 3.4.3, 3.4.5\n\nGitLab’s [Create stage](https://about.gitlab.com/features/?stage=create) enables organizations to design, develop, and securely manage code and project data. Configurations for organizational systems can be stored, managed, and deployed leveraging GitLab’s [infrastructure as code features](https://about.gitlab.com/features/?stage=deploy#infrastructure_as_code).\n\nBy managing configuration changes through code, organizations can track the lineage of each change request. [Merge request approval rules](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/rules.html) enable organizations to enforce how many approvals a merge request must receive before it can be merged, and which users are authorized to approve such requests. The history of each request is retained and can be reviewed through git.\n\n![CMMC - Multiple approvals](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098221/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098221222.png)\n\n\u003Ccenter>\u003Ci>Multiple approval rules\u003C/i>\u003C/center>\n\n### Identification and Authentication\n\n#### 3.5.1 - 3.5.3\n\nGitLab supports SAML SSO integrations for [GitLab.com groups](https://docs.gitlab.com/ee/user/group/saml_sso/), [GitLab Dedicated, and Self-managed instances](https://docs.gitlab.com/ee/integration/saml.html). 
Organizations can further simplify their GitLab identity and access management (IAM) processes by configuring System for Cross-domain Identity Management (SCIM).\n\nGitLab also supports the use of [multi-factor authentication](https://docs.gitlab.com/ee/user/profile/account/two_factor_authentication.html), thereby enabling organizations to choose the IAM controls that fit their organizational needs.\n\n### Risk Assessment\n\n#### 3.11.2 - 3.11.3\n\nGitLab supports a powerful suite of scanning features to help create holistic and robust application development and supply chain management processes.\n\nGitLab enables organizations to discover vulnerabilities through [Static Application Security Testing (SAST)](https://about.gitlab.com/features/?stage=secure#static_application_security_testing), [Infrastructure as Code Security Scanning](https://docs.gitlab.com/ee/user/application_security/iac_scanning/), [Dynamic Application Security Testing (DAST)](https://about.gitlab.com/features/?stage=secure#dynamic_application_security_testing), [Container Scanning](https://about.gitlab.com/features/?stage=secure#container_scanning), and [Dependency Scanning](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/). \n\nDiscovered vulnerabilities on the default branch can be viewed in aggregate through GitLab’s [Vulnerability Report](https://docs.gitlab.com/ee/user/application_security/vulnerability_report/). 
From there, organizations can dive into each finding’s [Vulnerability page](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/) to [create issues](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/index.html#create-a-gitlab-issue-for-a-vulnerability) to track and discuss the vulnerability, and [resolve the vulnerability](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/#resolve-a-vulnerability), either manually or via a merge request.\n\n![CMMC - Vulnerability report](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098221/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750098221223.png)\n\nAdditionally, GitLab Duo’s [Vulnerability Explanation](https://docs.gitlab.com/ee/user/gitlab_duo/#vulnerability-explanation) feature can be leveraged to better understand discovered vulnerabilities, how they can be exploited, and how to fix them.\n\nTo go a step further, AWS recently [announced GitLab Duo with Amazon Q](https://about.gitlab.com/blog/gitlab-duo-with-amazon-q-devsecops-meets-agentic-ai/). Within a GitLab merge request, Amazon Q developer scans all changes looking for security vulnerabilities, quality issues such as code that doesn’t follow best practices, and any other potential problems with the code. After it’s finished, it will add each finding as a comment that includes a snippet of the problematic code found, a description of the issue, and a severity rating. 
Amazon Q with GitLab Duo will also recommend a code security fix.\n\n\u003Cdiv style=\"padding:56.25% 0 0 0;position:relative;\">\u003Ciframe src=\"https://player.vimeo.com/video/1033653810?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479\" frameborder=\"0\" allow=\"autoplay; fullscreen; picture-in-picture; clipboard-write\" style=\"position:absolute;top:0;left:0;width:100%;height:100%;\" title=\"GitLab Duo and Amazon Q\">\u003C/iframe>\u003C/div>\u003Cscript src=\"https://player.vimeo.com/api/player.js\">\u003C/script>\n\n### System and Information Integrity\n\n#### 3.14.1\n\nAs mentioned above, GitLab provides numerous features to identify vulnerabilities. Organizations can structure [scan execution policies](https://docs.gitlab.com/ee/user/application_security/policies/scan_execution_policies.html) to help ensure vulnerabilities are identified expediently when commits are pushed and on a regular schedule. Identified vulnerabilities can be [linked](https://docs.gitlab.com/ee/user/application_security/vulnerabilities/#link-a-vulnerability-to-existing-gitlab-issues) to issues for identified software flaws to support a more informed and unified remediation process.\n\nHere is an at-a-glance look at GitLab's companion features for CMMC Level 2:\n\n![Table of CMMC Level 2 compliance capabilities in GitLab](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098221/Blog/Content%20Images/Blog/Content%20Images/cmmctable_aHR0cHM6_1750098221225.png)\n\n### Learn more\n\nAs the most comprehensive AI-powered DevSecOps platform, GitLab enables its customers to meet a broad range of regulatory and compliance requirements through an extensive and rich feature set. 
You can dig deeper into these features with our [library of tutorials](https://docs.gitlab.com/ee/tutorials/).",[693,9,186],{"slug":3713,"featured":6,"template":698},"streamline-the-path-to-cmmc-level-2-compliance-with-gitlab","content:en-us:blog:streamline-the-path-to-cmmc-level-2-compliance-with-gitlab.yml","Streamline The Path To Cmmc Level 2 Compliance With Gitlab","en-us/blog/streamline-the-path-to-cmmc-level-2-compliance-with-gitlab.yml","en-us/blog/streamline-the-path-to-cmmc-level-2-compliance-with-gitlab",{"_path":3719,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3720,"content":3726,"config":3731,"_id":3733,"_type":14,"title":3734,"_source":16,"_file":3735,"_stem":3736,"_extension":19},"/en-us/blog/supercharge-productivity-with-gitlab-duo",{"title":3721,"description":3722,"ogTitle":3721,"ogDescription":3722,"noIndex":6,"ogImage":3723,"ogUrl":3724,"ogSiteName":685,"ogType":686,"canonicalUrls":3724,"schema":3725},"Supercharge productivity with generative AI and GitLab Duo","Learn how the GitLab DevSecOps platform leverages generative AI and large language models to enable organizations to boost productivity and efficiency.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674838/Blog/Hero%20Images/duo-blog-post.png","https://about.gitlab.com/blog/supercharge-productivity-with-gitlab-duo","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Supercharge productivity with generative AI and GitLab Duo\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Abubakar Siddiq Ango\"}],\n        \"datePublished\": \"2023-07-20\",\n      }",{"title":3721,"description":3722,"authors":3727,"heroImage":3723,"date":3728,"body":3729,"category":801,"tags":3730},[1538],"2023-07-20","\nIn GitLab's [2023 Global DevSecOps Report](https://about.gitlab.com/developer-survey/), developer productivity and operational efficiency were top of mind for most respondents and ranked as 
the two top benefits organizations strive for when adopting DevSecOps methodologies. At the same time, artificial intelligence (AI) and machine learning (ML) have become highly relevant to research and development teams in recent years and have gained increased importance in the software development process of many companies. Across the software development lifecycle, generative AI is enabling faster development cycles, less time spent on code reviews, and more secure software development practices.\n\nAt GitLab, we’re using generative AI and ML to deliver productivity and efficiency gains to DevSecOps teams throughout the software development lifecycle. In this blog post, we will look at how the suite of [GitLab Duo](https://about.gitlab.com/gitlab-duo/) AI capabilities boost productivity. First, let's understand the underlying technology: generative AI.\n\n## What is generative AI?\nGenerative AI is a branch of AI capable of generating new content. The generated content is created by using patterns learned from examples or input data during the training process. One of the most popular architectures used to learn from data is a [transformer model](https://blogs.nvidia.com/blog/what-is-a-transformer-model/). Given the success of transformer models in natural language processing (NLP) and their flexibility in being adapted or fine-tuned to different and more specific tasks, they have been called \"foundation models,\" a term coined by Stanford researchers in an [August 2021 research paper](https://arxiv.org/pdf/2108.07258.pdf).\n\nThese models, when trained on text data, are capable of learning context and meaning by tracking relationships in the input data. A successful application of transformer models is [large language models (LLMs)](https://www.nvidia.com/en-us/glossary/data-science/large-language-models/): algorithms trained with petabyte-scale, text-based data sets to recognize, predict, and generate various forms of content. 
Examples of LLMs include Google’s [PaLM 2](https://blog.google/technology/ai/google-palm-2-ai-large-language-model/), OpenAI’s [GPT series](https://openai.com/gpt-4) and Meta’s [Llama 2](https://ai.meta.com/llama/). According to [Ark Invest](https://ark-invest.com/big-ideas-2023/artificial-intelligence/), generative AI is expected to increase the productivity of knowledge workers more than fourfold by 2030.\n\nThe GitLab DevSecOps Platform leverages generative AI and LLMs to power [GitLab Duo](https://about.gitlab.com/gitlab-duo/), a suite of AI capabilities that enable organizations to boost efficiency and ship secure software faster. Now, let’s take a look at the capabilities of GitLab Duo.\n\n## Efficient & secure software development\n\nIn a research paper looking into the impact of AI on developer productivity published in February 2023, [Peng et al.](https://arxiv.org/abs/2302.06590) found that developers who used AI-powered tools completed their tasks 55.8% faster than those who didn’t. In this section, we'll look at how GitLab Duo can help teams reduce development time, improve developer productivity, and ensure secure software development.\n\n### Code Suggestions\nEfficient developer experience is critical to productivity, and GitLab Duo [Code Suggestions](https://docs.gitlab.com/ee/user/project/repository/code_suggestions.html) improves the developer experience and leads to shorter cycle times. If you want to import dependencies in your favourite programming language or create a function along with unit tests, Code Suggestions automatically provides suggestions in your IDE with a high degree of accuracy.\n\nCode Suggestions is currently available in Beta for all tiers on GitLab.com and in self-managed GitLab from Version 16.1. 
You can [use Code Suggestions with the GitLab WebIDE, Visual Studio Code](https://docs.gitlab.com/ee/user/project/repository/code_suggestions.html), and [other IDEs](https://about.gitlab.com/blog/extending-code-suggestions/).\n\n### Code explanations\nDevelopers spend a ton of time on search engines trying to figure out what a block of code does or why it behaves the way it does. The [\"explain this code\"](https://docs.gitlab.com/ee/user/ai_features.html#explain-selected-code-in-the-web-ui) feature is currently an [experiment](https://docs.gitlab.com/ee/policy/experiment-beta-support.html#experiment) on GitLab.com and uses LLMs to explain code in natural language, including context based on the code selected for it to explain.\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/xzsFfFqvlnU\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n### AI-generated tests for code changes\nEnsuring code changes are thoroughly tested is hard work and can be time-consuming. In your merge requests, with generative AI, you can generate test files for your code to provide coverage for the change introduced, reducing the time spent writing tests.\n\n![Merge request test coverage with AI](https://about.gitlab.com/images/blogimages/merge-request-generate-tests-ai.gif)\n\n### Vulnerability explanations\nA critical part of preventing the escalation of vulnerabilities is understanding why vulnerabilities were discovered and how to fix them. This takes much effort to research before making an informed decision on the next action step. 
[GitLab Duo vulnerability recommendations](https://about.gitlab.com/blog/explain-this-vulnerability/) provide detailed information on identified vulnerabilities, including context from your code, how they can be exploited, and example fixes, thus allowing quick remediation of vulnerabilities.\n\n![Explain this vulnerability](https://about.gitlab.com/images/blogimages/2023-04-27-explain-this-vulnerability.png)\n\n### Value stream forecasting\n\nThe efficiency of the software development lifecycle is critical to a team’s productivity and quality of value delivery. It is critical for software leaders to identify trends from events occurring in the lifecycle and implement changes to improve efficiency. GitLab’s [Value Streams Dashboard](https://docs.gitlab.com/ee/user/analytics/value_streams_dashboard.html) allows teams to track software performance and the flow of value across the software development lifecycle with [DORA metrics](https://docs.gitlab.com/ee/user/analytics/dora_metrics.html), [value stream analytics](https://docs.gitlab.com/ee/user/group/value_stream_analytics/index.html), and [vulnerability reports](https://gitlab.com/gitlab-org/gitlab/-/security/vulnerability_report).\n\n## Improved code review processes\n\n[In a 2019 survey](https://static1.smartbear.co/smartbearbrand/media/pdf/the-2019-state-of-code-review.pdf), 55% of respondents were either indifferent to or dissatisfied by their team’s code review process, and this is largely due to code reviews being seen as added work ([necessary but often time-consuming](https://codeclimate.com/blog/time-wasting-code-review)). In this section, we'll look at how GitLab Duo can help reduce review cycles and increase speed.\n\n### Merge request change summary\n\nOnce your changes are ready for review, you create a merge request, which allows you to collaborate with other stakeholders. 
Merge request descriptions often give the context on why the changes were made, but sometimes don’t include details of the changes themselves, except if you look at the code. Leveraging AI and LLMs, GitLab can provide relevant summaries of merge requests, reducing time spent ensuring the merge request description is up to date as changes evolve. All you need to do is use the `/summarize_diff` [quick action](https://docs.gitlab.com/ee/user/project/quick_actions.html) to add a summary of changes in a comment.\n\n![Merge request changes summary with AI](https://about.gitlab.com/images/blogimages/merge-request-changes-summary-ai.gif)\n\n### Issue comment summary\n\nWhen planning work, discussion is an essential element of collaboration. Discussions can become lengthy and catching up on all the comments can be a challenge, especially in an organization with an asynchronous culture. Similar to summarizing merge request changes, GitLab can [summarize issue comments](https://about.gitlab.com/blog/summarize-issues/), which is valuable in efficiently understanding the status of work from the issue discussions and the next steps to take. Here is a video of how it works:\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/GMr3eHwbYAI\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n### Suggested reviewers\nIdentifying reviewers can be time-consuming, especially with cross-functional collaborations in a large team. When enabled by project maintainers or owners, GitLab [suggests reviewers](https://docs.gitlab.com/ee/user/project/merge_requests/reviews/index.html#suggested-reviewers) for a merge request based on the code changes made and the project’s contribution graph. 
This leads to higher-quality feedback from team members with domain knowledge and increases the speed of reviews.\n\n### Merge request review summary\nCode reviewers often leave feedback across several parts of the changes they’ve reviewed, then proceed to write a separate comment to give an overview of all the feedback given. The reviewer spends valuable time ensuring the merge request author has the context to understand the review. With GitLab Duo, the reviewer can simply use the [\"summarize my code review\"](/blog/summarize-my-merge-request-review/) capability to provide authors with context around the feedback, without the need to process the entire review.\n\n![Summarize code review](https://about.gitlab.com/images/blogimages/summarize-my-merge-request-review-ai.gif)\n\n## What's coming next\nGitLab is continuously iterating on our [AI-assisted capabilities](https://about.gitlab.com/blog/ai-ml-in-devsecops-series/) to find innovative ways to enable you to efficiently build more secure software faster, while putting privacy first in a single application that gives every stakeholder visibility. This includes AI [experiments](https://docs.gitlab.com/ee/policy/experiment-beta-support.html#experiment) such as [GitLab Duo Chat](https://docs.gitlab.com/ee/user/ai_features.html#gitlab-duo-chat), which uses generative AI to answer product-specific questions about GitLab, reducing time spent reviewing the GitLab documentation. Another experiment is the \"fill in merge request description\" capability, which uses AI and content from the proposed changes to fill in a merge request description template.\n\n![Summarize MR Descriptions](https://about.gitlab.com/images/blogimages/summarize-mr-description.gif)\n\nGitLab Duo’s AI-assisted workflows enable teams in every phase of the software development lifecycle to deliver secure software faster with increased efficiency and reduced cycle times. 
Learn more about [GitLab Duo here](https://about.gitlab.com/gitlab-duo/).\n",[951,9,803],{"slug":3732,"featured":6,"template":698},"supercharge-productivity-with-gitlab-duo","content:en-us:blog:supercharge-productivity-with-gitlab-duo.yml","Supercharge Productivity With Gitlab Duo","en-us/blog/supercharge-productivity-with-gitlab-duo.yml","en-us/blog/supercharge-productivity-with-gitlab-duo",{"_path":3738,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3739,"content":3744,"config":3751,"_id":3753,"_type":14,"title":3754,"_source":16,"_file":3755,"_stem":3756,"_extension":19},"/en-us/blog/the-gitlab-ai-security-framework-for-security-leaders",{"title":3740,"description":3741,"ogTitle":3740,"ogDescription":3741,"noIndex":6,"ogImage":683,"ogUrl":3742,"ogSiteName":685,"ogType":686,"canonicalUrls":3742,"schema":3743},"The GitLab AI Security Framework for security leaders","Discover how GitLab Duo's security controls, third-party integrations, and retention policies help teams safely implement AI into their development workflow.","https://about.gitlab.com/blog/the-gitlab-ai-security-framework-for-security-leaders","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"The GitLab AI Security Framework for security leaders\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Kyle Smith\"},{\"@type\":\"Person\",\"name\":\"Ayoub Fandi\"}],\n        \"datePublished\": \"2025-03-04\",\n      }",{"title":3740,"description":3741,"authors":3745,"heroImage":683,"date":3748,"body":3749,"category":801,"tags":3750},[3746,3747],"Kyle Smith","Ayoub Fandi","2025-03-04","As companies rapidly adopt AI technologies, CISOs face a new frontier of security challenges. Many security leaders find themselves grappling with unfamiliar questions: How do we evaluate AI vendors differently from traditional software vendors? What security controls matter most? 
Where does vendor responsibility end and customer responsibility begin? How do we evaluate AI security risks within the context of the service provided? To help answer these questions, we’ve created the [GitLab AI Security Framework](https://trust.gitlab.com/?itemUid=ad3d92c1-889e-49fc-b19c-2434f70071ee&source=click) to show security leaders how GitLab and customers enable secure AI-powered development using GitLab Duo.\n\n## The genesis of AI security challenges\n\nFrom conversations with security leaders across industries a pattern has emerged: Organizations are rapidly embracing AI technologies to improve delivery while their security teams struggle to establish appropriate security controls. \n\nThis disconnect isn't just a matter of resources or expertise – it represents a fundamental shift in how organizations need to approach security in the AI era. Security leaders are witnessing quick and unprecedented adoption of AI across their organizations, from development teams using coding assistants to marketing departments leveraging generative AI. \n\nWhile organizations are integrating AI within their own software, many of their current vendor-provided SaaS applications have added AI capabilities as well. Although this adoption drives innovation and efficiency, it also creates a complex set of security considerations that traditional frameworks weren't designed to address. Below are some of the specific challenges we’ve identified.\n\n## Security challenges in the AI era\n\n**1. Responsibility and control uncertainty**\n\nThe rapid pace of AI adoption has left many organizations without a coherent security governance strategy. Security teams find themselves trying to retrofit existing security frameworks to address AI-specific concerns. Security leaders face challenges in understanding where their responsibilities begin and end when it comes to AI security. 
The traditional vendor-customer relationship becomes more complex with AI systems, as data flows, model training, and inference processes create new types of interactions and dependencies. \n\n**2. Risk assessment evolution**\n\nTraditional security risk models struggle to capture the unique characteristics of AI systems. Security leaders are finding that standard risk assessment frameworks don't adequately address AI-specific risks. AI security risks will differ based on AI implementation and the context in which it’s used. The challenge is compounded by the need to evaluate AI vendors without necessarily having deep technical AI expertise on the security team.\n\n**3. Data protection complexities**  \nAI systems present unique challenges for data protection. The way these systems process, learn from, and generate data creates new privacy and security considerations that organizations should carefully evaluate. CISOs must ensure their data governance frameworks evolve to address how AI systems use and protect sensitive information. AI implementations with inadequate safeguards might inadvertently reveal protected information via AI generated outputs.\n\n**4. Compliance and standards navigation**  \nThe regulatory landscape for AI security is rapidly evolving, with new standards like ISO 42001 and others emerging alongside existing frameworks. Security leaders must navigate this complex environment while ensuring their AI implementations remain compliant with both current and anticipated regulations. This requires a delicate balance between enabling AI adoption and maintaining robust security controls that satisfy regulatory requirements.\n\n## Addressing these challenges  \nWith the release of [GitLab Duo](https://about.gitlab.com/gitlab-duo/), we recognized these executive-level concerns and developed a comprehensive framework to help organizations navigate AI security in the context of our AI-powered DevSecOps platform. 
Our AI Security Framework provides details on our privacy-first implementation of AI to enable GitLab Duo, and how we validate the security of our AI vendors. A responsibility matrix is included to help security leaders manage their AI security responsibilities while enabling their organizations to innovate safely. We also compiled a selection of AI-specific security risks to keep in mind and highlighted how GitLab capabilities like [prompt guardrails](https://about.gitlab.com/blog/how-gitlab-uses-prompt-guardrails-to-help-protect-customers/) can help in mitigating them. \n\n> Want a deeper look at our security controls? Check out our [AI Security Framework](https://trust.gitlab.com/?itemUid=ad3d92c1-889e-49fc-b19c-2434f70071ee&source=click).\n\n## Learn more\n- [GitLab AI Transparency Center](https://about.gitlab.com/ai-transparency-center/)\n- [How GitLab uses prompt guardrails to help protect customers](https://about.gitlab.com/blog/how-gitlab-uses-prompt-guardrails-to-help-protect-customers/)\n- [Improve AI security in GitLab with composite identities](https://about.gitlab.com/blog/improve-ai-security-in-gitlab-with-composite-identities/)\n- [Secure, compliant, and AI-powered: Get to know 3 new GitLab features](https://about.gitlab.com/blog/secure-compliant-and-ai-powered-get-to-know-3-new-gitlab-features/)\n- [ICYMI: Key AI and security insights from our developer community](https://about.gitlab.com/blog/icymi-key-ai-and-security-insights-from-our-developer-community/)",[803,695,9,693],{"slug":3752,"featured":91,"template":698},"the-gitlab-ai-security-framework-for-security-leaders","content:en-us:blog:the-gitlab-ai-security-framework-for-security-leaders.yml","The Gitlab Ai Security Framework For Security 
Leaders","en-us/blog/the-gitlab-ai-security-framework-for-security-leaders.yml","en-us/blog/the-gitlab-ai-security-framework-for-security-leaders",{"_path":3758,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3759,"content":3765,"config":3771,"_id":3773,"_type":14,"title":3774,"_source":16,"_file":3775,"_stem":3776,"_extension":19},"/en-us/blog/the-ultimate-guide-to-enabling-saml",{"title":3760,"description":3761,"ogTitle":3760,"ogDescription":3761,"noIndex":6,"ogImage":3762,"ogUrl":3763,"ogSiteName":685,"ogType":686,"canonicalUrls":3763,"schema":3764},"The ultimate guide to enabling SAML and SSO on GitLab.com","Learn how to make full use of SAML and SSO security features on the GitLab DevSecOps platform.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749666495/Blog/Hero%20Images/cover-1800x945.png","https://about.gitlab.com/blog/the-ultimate-guide-to-enabling-saml","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"The ultimate guide to enabling SAML and SSO on GitLab.com\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Bradley Lee\"}],\n        \"datePublished\": \"2023-09-14\",\n      }",{"title":3760,"description":3761,"authors":3766,"heroImage":3762,"date":3768,"body":3769,"category":693,"tags":3770},[3767],"Bradley Lee","2023-09-14","\nAs a follow-on to the recent blog, [The ultimate guide to securing your code on\nGitLab.com](https://about.gitlab.com/blog/securing-your-code-on-gitlab/),\nwe recommended enabling SAML (Security Assertion Markup Language) and SSO (single\nsign-on) to enable tighter control over code access. Let’s take a deep dive into\nhow to enable SAML and SSO on GitLab.com.\n\n## What are SAML and SSO?\nSAML is an open standard, which service providers (like GitLab.com) and\nidentity providers (commonly referred to as IdPs) use to communicate\nauthentication data. 
SSO is provided by IdPs, such as Okta and Entra ID\n(formerly Azure AD), and enables users to log into multiple systems or service\nproviders through a single interface with a single set of credentials.\n\nAs with any configuration, there should be thoughtful and careful planning when\nenabling SSO.\n\n### What are the benefits of SSO?\nIn general, enabling SSO streamlines the user experience by unifying the login\nprocess and reducing the account and password bloat required for multiple\nenterprise applications. Enabling SSO also adds an extra layer of security and\nmanagement efficiency for identity management teams by providing a single\nsource of truth for authentication. Below, you’ll learn how SAML SSO applies\nspecifically to GitLab.com.\n\n## Configuring SSO and SAML for GitLab.com\nPremium and Ultimate tiers can enable SSO in the settings available at the\nnamespace or top level group.\n\n### Enabling SSO at the group level\nBefore getting started, you’ll need a few key\npieces of information from your chosen IdP:\n- The IdP SSO URL\n- The certificate fingerprint provided by the IdP application\n\nOnce these key pieces are entered, check the “Enable SAML\nauthentication for this group” box. \n\n### How user accounts are linked\nBefore we proceed further into configuration, let’s take a look at how GitLab\nauthenticates against the IdP.\n\nFor GitLab.com, each user who requires access to\nthe system must have an account on GitLab.com. By default, when a user first\nattempts logging into GitLab via SSO, GitLab will receive the SAML assertion\nand validate if the identity (specifically the email address) is linked to a\nGitLab.com account. If not, GitLab will request the user either login to an\nexisting account or create a new account. In most instances, this may not be\ndesired behavior; however, we will address this later in the process. 
We’ve\nprovided a flowchart below to help you navigate the provisioning flow.\n\n![image of saml group links](https://about.gitlab.com/images/blogimages/2023-09-14-ultimate-guide-to-enabling-saml/saml-provisioning.png)\n\n### Enforcing SSO\nTo further increase security, there are two options available for enforcing\nSSO. Assuming neither are checked, users with access to the namespace can log\nin with either the SSO credentials or the GitLab.com credentials.\n\nHere is a working example that we can use to follow along as we discuss how the\nconfiguration options affect our baseline. Let’s consider a user in the IdP\nwhere the username is `idpusername` and contains a super secret password:\n`idppassword` (apologies, security professionals). Taking into account the\ninformation we just learned about account linking, let us also assume our demo\nuser created a new account following the prompt from an SSO login with a\nusername of `gitlabusername` and `gitlabpassword` as an even more secure\npassword.\n\n#### Enforcing SSO only for web\nWhen enabling the “Enforce SSO-only authentication for web activity for this\ngroup” setting, all members must now access all groups and projects under the\nhierarchy using the configured SSO login regardless of whether they have an\nexisting SAML identity. As we mentioned prior, with this flag disabled, our\n`idpusername` user will be able to log into the GitLab namespace with either\nthe `idpusername` or `gitlabusername` credential sets. 
When we enable this\nsetting for web-based activity ([further details in\ndocs](https://docs.gitlab.com/ee/user/group/saml_sso/#sso-only-for-web-activity-enforcement)),\nour group is now only accessible by the `idpusername` credential set.\n\n#### Enforcing SSO only for Git proxy\nVery similar to enforcing SSO for web, when the “Enforce SSO-only\nauthentication for Git and Dependency Proxy” activity for this group option is\nenabled, a few things happen:\n- Calling an API endpoint that involves Git activity requires SSO.\n- For Git activity over SSH and HTTPS, users must have at least one active session signed-in through SSO before they can push to or pull.\n\nThere is a strong recommendation to enable both of these settings to take full\nadvantage of the benefits of SSO for users and administrators through\ncentralized authentication.\n\n### Enterprise user support\nNow that we know how some of the configuration options can help secure access,\nlet’s take a deeper dive into user management. Consider the following scenario:\nOur `idpusername` user has decided to pursue another opportunity outside of the\ndomain. Based on what we have configured now, once the account has been\ndeprovisioned from the IdP, it should no longer have access to anything secured\nbehind it on GitLab.com. However, while the user will not have access, the\nassociated user ID and roles still remain until manually removed. This is where\nEnterprise users come in.\n\n#### What are Enterprise users in GitLab?\nIf you look closely, any user that has a linked SSO account will carry a `SAML`\nbadge in the member list. GitLab also has an associated `Enterprise` badge\nthat grants additional management functionality through SSO. 
For a user to\ncarry the `Enterprise` badge, the user must either have the initial GitLab.com account creation initiated by a SAML SSO login or have the initial GitLab.com account created by SCIM.\n\n#### What is SCIM?\nSCIM, or System for Cross-domain Identity Management, is another standard\nused in conjunction with SAML, primarily for provisioning and deprovisioning\nacross multiple systems. By enabling SCIM for your GitLab.com group (which is\ncurrently supported with Entra ID and Okta), you can enable automatic\nprovisioning and deprovisioning of accounts.\n\nIf we look back at some of our scenarios, without SCIM, our `idpusername` user\nwas prompted to create or link a GitLab.com account on first login. With SCIM\nenabled, this process is handled automatically based on information provided\nand managed by the IdP and is completely transparent to the end user. The\nsecond half of our scenario, where our `idpusername` user is deprovisioned from\nthe IdP, also is solved with automation via SCIM. In this instance, when the\nuser is removed on the IdP side, SCIM automatically disconnects the SAML\nidentity from the GitLab.com account and removes the user from the GitLab.com\ngroup.\n\n#### Protecting your intellectual property\nAnother important feature of Enterprise users is the ability to control two\nvery important user settings that are not accessible to group administrators on\nGitLab.com. Since all users require an account on GitLab.com, they are also\ngranted access to a personal user namespace. For example, our `idpusername` will have access to our Acme Corp. group at `.com/acmecorp`, and will also have\naccess to their own personal space at `.com/idpusername`. One common concern with this is the ability for users to take code out of the organization\nnamespace and commit to their own personal namespace.\n\nWith Enterprise users, we have two settings that we can control based on attributes received in the SAML\nresponse. 
These keys are `projects_limit` and `can_create_group`. The\n`projects_limit` is an integer value that sets the amount of projects a user\ncan create in their personal namespace. When set to `0`, this effectively\ndisables project creation in that space. Similarly, `can_create_group` is a\nboolean `true` or `false` value that indicates whether a user can create new\ngroups.\n\n### Managing roles with SAML\nNow that we know the ins and outs of creating and removing users with SAML and\nSCIM, how can we leverage our work to help manage our active users? In this\nfinal section, we’ll take a look at why we recommend setting default membership\nto \"Minimal Access\" and how to leverage group memberships in the IdP.\n\n#### Why Minimal Access?\nIn the [Ultimate guide to securing your code on GitLab](https://about.gitlab.com/blog/securing-your-code-on-gitlab/),\nwe recommend setting the default membership role to Minimal Access, and\noperating with the concept of least privilege. Roles can be elevated as needed\nin subgroups or individual projects while preventing visibility to projects or\nsubgroups where the user is not explicitly granted another role. By default,\nthis option is set to Guest, which will allow all provisioned users guest\naccess to the repositories. Default membership controls are available at the\ntop-level group, along with the SAML and SSO settings. For automation at the\nsubgroup level, we can leverage SAML Group Sync.\n\n#### Configuring SAML Group Sync with SAML Group Links\nBefore we dive into the configuration, there is one very important step we need\nto take. The configured SAML assertion that is sent MUST include an attribute\nnamed `Groups` or `groups`. 
If SAML Group Links are present without the\nattribute in the assertion, users may be removed from the group or reverted to\nMinimal Access.\n\nAfter we ensure our assertions contain the necessary information, we can start\nusing SAML Group Links to automatically assign membership roles to GitLab\ngroups based on group membership in the IdP. Let’s build on our demo user\n`idpusername` by considering the following:\n- `idpusername` is a maintainer on the acme-web project.\n- The `acme-web` project exists under the `acme-corp` namespace, under subgroup `acme-com`.\n- The full path to the project would be `.com/acme-corp/acme-com/acme-web`.\n- `idpusername` should also be granted developer access for the `acme-db` project, which is also under the `acme-com` group.\n- In our IdP, `idpusername` is a member of the IdP group `idp-acme-com`.\n\nSAML group links allow us to map IdP group memberships to role assignments at\nthe GitLab group level. In this scenario, we can create a group link at the\n`acme-com` group in GitLab that maps the IdP group `idp-acme-com` to the\ndeveloper role to the `acme-com` group.\n\nDue to inheritance, our `idpusername`\nuser will be granted developer access and associated visibility to every\nproject and group that falls under the GitLab `acme-com` group automatically by\nvirtue of the IdP group membership, because we’re working under the concept of\nleast privilege for the `acme-web` project.\n\nThe `idpusername` user’s role can\nbe elevated to maintainer directly in the project. From a user perspective,\n`idpusername` would still carry the Minimal Access role at the `acme-corp`\ngroup as well. 
This allows a separation of access management between\nengineering and identity management teams and allows role management to be\nflexible with guardrails.\n\n![image of saml group links](https://about.gitlab.com/images/blogimages/2023-09-14-ultimate-guide-to-enabling-saml/saml-group-links.png)\n\nWith this approach, it’s important to find that balance between what is managed\nin the IdP and what is managed in GitLab. It’s possible to have hundreds of\ngroup mappings to roles in the IdP and almost completely remove role management\nwithin GitLab and vice versa. The flexibility that GitLab allows enables you to\nfind the best solution that works for you. Building on our example, if we hire\nanother engineer for the `acme-com` project, they can be added to the GitLab\napplication in the IdP, and added to the `idp-acme-com` group. This\nautomatically assigns them the developer role at the `acme-com` group and for\nall projects under it, while limiting access to any other groups outside of\n`acme-com` in the namespace.\n\n## Learn more\nWe’ve covered how to get started with enabling SAML and SSO on your GitLab.com\ngroup, along with how to leverage the features to programmatically manage users\nand roles with real examples. 
For more information, see the full [SAML SSO for\nGitLab.com groups](https://docs.gitlab.com/ee/user/group/saml_sso/)\ndocumentation.\n\nCover image by [Towfiqu barbhuiya](https://unsplash.com/photos/FnA5pAzqhMM) on [Unsplash](https://unsplash.com)\n{: .note}\n",[693,719,9],{"slug":3772,"featured":6,"template":698},"the-ultimate-guide-to-enabling-saml","content:en-us:blog:the-ultimate-guide-to-enabling-saml.yml","The Ultimate Guide To Enabling Saml","en-us/blog/the-ultimate-guide-to-enabling-saml.yml","en-us/blog/the-ultimate-guide-to-enabling-saml",{"_path":3778,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3779,"content":3785,"config":3791,"_id":3793,"_type":14,"title":3794,"_source":16,"_file":3795,"_stem":3796,"_extension":19},"/en-us/blog/the-ultimate-guide-to-token-management-at-gitlab",{"title":3780,"description":3781,"ogTitle":3780,"ogDescription":3781,"noIndex":6,"ogImage":3782,"ogUrl":3783,"ogSiteName":685,"ogType":686,"canonicalUrls":3783,"schema":3784},"The ultimate guide to token management at GitLab","Learn all the steps in the end-to-end process of identifying, managing, and securing tokens for improved security across the software development lifecycle.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097408/Blog/Hero%20Images/Blog/Hero%20Images/AdobeStock_1097303277_6gTk7M1DNx0tFuovupVFB1_1750097407860.jpg","https://about.gitlab.com/blog/the-ultimate-guide-to-token-management-at-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"The ultimate guide to token management at GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Hakeem Abdul-Razak\"}],\n        \"datePublished\": \"2025-02-25\",\n      }",{"title":3780,"description":3781,"authors":3786,"heroImage":3782,"date":3788,"body":3789,"category":693,"tags":3790},[3787],"Hakeem Abdul-Razak","2025-02-25","Imagine this: You are an engineer at a growing tech company, and it’s 2 
a.m. when you get an urgent call. A critical deployment pipeline has failed, and your team is scrambling to figure out why. After hours of digging, you realize someone revoked a personal access token belonging to an engineer who left the company a week ago. This token was tied to several key automation processes, and now your entire system is in chaos. How do you make sure it does not happen again?\n\nFollow this guide, which takes GitLab customers through the end-to-end process of identifying, managing, and securing their tokens. It is meant to be a handy supplement to the extensive [token overview documentation](https://docs.gitlab.com/ee/security/tokens) for GitLab administrators, developers, and security teams who need to ensure proper token management within their projects.\n\nHere's what is covered in this guide:\n- [How to select the right token for the job](#how-to-select-the-right-token-for-the-job)\n- [Token types](#token-types)\n- [Discovering your tokens](#discovering-your-tokens)\n    - [Credentials inventory](#credentials-inventory)\n- [Managing tokens in the GitLab UI and API](#managing-tokens-in-the-gitlab-ui-and-api)\n- [Token rotation and expiration management](#token-rotation-and-expiration-management)\n- [Token management best practices](#token-management-best-practices)\n    - [Service accounts](#service-accounts)\n\n## How to select the right token for the job\n\nChoosing the right token ensures optimal security and functionality based on your use case. \nTokens can be used for authenticating API requests, automating CI/CD pipelines, integrating third-party tools, managing deployments and repositories, and more.\n\n![Token management guide - flow chart for tokens](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097435/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097434869.png)\n\nFor the sake of simplicity, the chart illustrates a straightforward use case tied to single user ownership. 
For more information, check out our documentation of user roles and permissions at each [namespace](https://docs.gitlab.com/ee/user/permissions.html) (user/group) in your instance or top-level group. Example use cases could be as follows: \n\n- **Personal access tokens** ([PAT](https://docs.gitlab.com/user/profile/personal_access_tokens/#personal-access-token-scopes)) can be used by developers when a user's personal access and permissions are required. In this case, the credentials follow the status and permissions of the user, including the removal of access if the account loses access to a specific project or group (or is blocked entirely).   \n- **Project/group access tokens** ([PrAT](https://docs.gitlab.com/user/project/settings/project_access_tokens/#scopes-for-a-project-access-token)/[GrAT](https://docs.gitlab.com/user/group/settings/group_access_tokens/#scopes-for-a-group-access-token)) are recommended when access should be scoped to resources within a specific project/group, allowing anyone with a PrAT/GrAT to access those resources through mechanisms managed by assigned scopes.\n\n## Token types\n\nBelow is a list of GitLab tokens with their default prefixes and use cases. For more information, please visit the [GitLab Token overview page](https://docs.gitlab.com/ee/security/tokens/#available-scopes). 
\n\n| Tokens | Prefix  | Description |\n| :---: | :---: | :---: |\n| Personal access token | glpat | Access user-specific data |\n| OAuth 2.0 token |  gloas | Integrate with third-party applications using OAuth2.0 authentication protocol |\n| Impersonation token | glpat | Act on behalf of another user for administrative purposes |\n| Project access token | glpat | Access data from a specific project |\n| Group access token | glpat |  Access data from a specific group |\n| Deploy token | gldt |  Clone, push, and pull container registry images of a project without a user and a password |\n| Deploy keys | N/A | Allow read-only or read-write access to your repositories |\n| Runner authentication token | glrt | Authenticate GitLab Runners |\n| CI/CD job token  | glcbt | Automate CI/CD processes |\n| Trigger token | glptt | Trigger pipelines manually or programmatically |\n| Feed token | glft | Authenticate access to package/RSS feeds |\n| Incoming mail token  | glimt | Process incoming emails |\n| GitLab agent for Kubernetes token | glagent | Manage Kubernetes clusters via the GitLab agent |\n| SCIM tokens | glsoat | Enable SCIM integrations for user provisioning |\n| Feature flags client token | glffct | Enable feature flags programmatically |\n| Webhook token | N/A | User set secret token to secure webhook payloads and ensure that the requests are from GitLab |\n\n## Discovering your tokens\n\n### Credentials inventory\n\nOn GitLab Ultimate, administrators (GitLab Self-Managed) and top-level group owners of an enterprise organization (GitLab.com as of Version 17.5) can monitor the credentials in their namespace.\n\nThis inventory tracks token details such as:\n\n* Token type  \n  * Available tokens on [GitLab.com](https://docs.gitlab.com/ee/user/group/credentials_inventory.html)  \n  * Available tokens on [GitLab Self-Managed](https://docs.gitlab.com/ee/administration/credentials_inventory.html)  \n* Associated user accounts  \n* Token scopes, and creation and 
expiration dates  \n* Token last used IP addresses (as of GitLab 17.10)  \n* Token filtration based on the above user-defined parameters  \n* Ability to revoke and rotate those tokens\n\nA well-maintained credentials inventory helps identify over-permissioned tokens, and gives insight into credentials that may need to be rotated, ensuring a secure and efficient workflow.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/A9ONfnwswd0?si=4VIEUgJaD4daj81b&amp;start=105\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n#### Credentials inventory API\n\nAs a complement to the UI, there is [ongoing development](https://gitlab.com/groups/gitlab-org/-/epics/16343) to release a credentials inventory API through the new /group/:id/manage [endpoint](https://docs.gitlab.com/ee/api/members.html#list-all-members-of-a-group-or-project). The credentials accessible under this endpoint are limited to enterprise [users](https://docs.gitlab.com/ee/user/enterprise_user/), and can be accessed by the top-level group owner of an enterprise organization. An example of the future API call would be:\n\n```console\ncurl --header \"PRIVATE-TOKEN: \u003Cpat>\" \"https://verified_domain.com/api/v4/groups/\u003Cgroup_id>/manage/personal_access_tokens\"           \n```\n### GitLab API\n\nThe GitLab API allows you to programmatically list and manage tokens within your organization. Key authentication-related endpoints support [various token types](https://docs.gitlab.com/ee/api/rest/authentication.html), including personal, group, CI/CD tokens, and more. 
An example of using a personal access token to list all visible projects across GitLab for the authenticated user is:\n\n```console\ncurl --header \"PRIVATE-TOKEN: \u003Cyour_access_token>\" \\\n     \"https://gitlab.example.com/api/v4/projects\"\n\n```\n\nWatch this video to learn how to make API calls to the GitLab API.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/0LsMC3ZiXkA?si=vj871YH610jwQdFc\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n### Finding where tokens are used\n\nCustomers can find where tokens are used in different ways:\n* under **User Profile > [Access Tokens](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html#view-the-time-at-and-ips-where-a-token-was-last-used)**\n* in credentials inventory\n* in audit events\n* via the API \n\nInformation on token usage is updated every 10 minutes for **last_used** and every minute for **last_used_ip**. \n\nThe ability to view IP addresses was introduced in GitLab 17.9, and is controlled by the **:pat_ip** feature flag. 
Follow these [steps to view the last time a token was used](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html#view-the-time-at-and-ips-where-a-token-was-last-used), along with its last five distinct IP addresses.\n\n![Token management guide - personal access tokens settings](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097435/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097434870.png)\n\n## Managing tokens in the GitLab UI and API\nThe following table includes videos detailing a few token creations in the UI and demonstrates their usage via the API.\n\n| Tokens     | GitLab UI    | GitLab API    |\n| ---------- | ---------- | ---------- |\n| Personal access token | [Documentation](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html#create-a-personal-access-token) and [video](https://youtu.be/v5Nj3Jy4vaI?t=3)  | [Documentation](https://docs.gitlab.com/ee/api/personal_access_tokens.html) and [video](https://youtu.be/v5Nj3Jy4vaI?t=43)  |\n| Group access token | [Documentation](https://docs.gitlab.com/ee/user/group/settings/group_access_tokens.html#group-access-tokens) and [video](https://youtu.be/v5Nj3Jy4vaI?t=120)  | [Documentation](https://docs.gitlab.com/ee/api/group_access_tokens.html) and [video](https://youtu.be/v5Nj3Jy4vaI?t=157)  |\n| Project access token | [Documentation](https://docs.gitlab.com/ee/user/project/settings/project_access_tokens.html#project-access-tokens) and [video](https://youtu.be/v5Nj3Jy4vaI?t=254)  | [Documentation](https://docs.gitlab.com/ee/api/project_access_tokens.html) and [video](https://youtu.be/v5Nj3Jy4vaI?t=285)  |\n\n## Token rotation and expiration management\n\nImplementing token rotation and strict expiration policies reduces the risk of compromise and ensures compliance with security standards. 
Regular rotation and enforced expirations prevent stale credentials from becoming security vulnerabilities.\n\nPreviously, expired group and project access tokens were automatically deleted upon expiration, which made auditing and security reviews more challenging due to the lack of a record of inactive tokens. To address this, a [recent feature](https://gitlab.com/gitlab-org/gitlab/-/issues/462217) introduced the retention of inactive group and project access token records in the UI for 30 days after they became inactive. This enhancement aims to allow teams to track token usage, expiration, and revocation for better compliance and monitoring.\n\nTo be more proactive in your token rotation and expiration management, do the following: \n\n* Actively rotate your tokens via the UI or API. If you use the latter, be mindful of the [automatic token reuse detection](https://docs.gitlab.com/ee/api/personal_access_tokens.html#automatic-reuse-detection) security mechanism.  \n* Set an instance-wide [maximum lifetime limit](https://docs.gitlab.com/ee/administration/settings/account_and_limit_settings.html#limit-the-lifetime-of-access-tokens) for access tokens. \n\n### Token rotation API\n\nUntil GitLab 17.7, customers had to programmatically rotate access tokens with the API. Its counterpart is now available on the UI. Check out the video in the table below or follow the [documentation](https://docs.gitlab.com/ee/user/project/settings/project_access_tokens.html#use-the-ui) for guidance.\n\n### Token rotation snippets\n\nThe following table includes videos detailing the rotation of GitLab tokens. 
\n\n| Tokens | Prerequisites | GitLab UI | GitLab API |\n| :---: | :---: | ----- | ----- |\n| Personal access token | Scope: api\u000b | [Documentation](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html#create-a-personal-access-token) and [video](https://youtu.be/v5Nj3Jy4vaI?t=76)  | [Documentation](https://docs.gitlab.com/ee/api/personal_access_tokens.html#rotate-a-personal-access-token) and [video](https://youtu.be/v5Nj3Jy4vaI?t=92)  |\n| Group access token | Scope: api and Role(s): owner | [Documentation](https://docs.gitlab.com/ee/user/group/settings/group_access_tokens.html#create-a-group-access-token-using-ui) and [video](https://youtu.be/v5Nj3Jy4vaI?t=203)  | [Documentation](https://docs.gitlab.com/ee/api/group_access_tokens.html) and [video](https://youtu.be/v5Nj3Jy4vaI?t=214)  |\n| Project access token | Scope: api and Role(s): owner, maintainer | [Documentation](https://docs.gitlab.com/ee/user/project/settings/project_access_tokens.html#create-a-project-access-token) and [video](https://youtu.be/v5Nj3Jy4vaI?t=335)  | [Documentation](https://docs.gitlab.com/ee/api/project_access_tokens.html) and [video](https://youtu.be/v5Nj3Jy4vaI?t=349)  |\n\n## Token management best practices\n\n### Principle of least privilege\n\nMitigate risk by restricting assigned permissions to tokens required for their respective tasks. This allows you to proactively predict and troubleshoot points of failure in your systems. You can do this by: \n\n* Selecting the right token for the right job. See the flowchart.  \n* Assign only the required scopes when creating a token. For example, use read-only scopes for tokens with auditor-like jobs. See [roles](https://docs.gitlab.com/ee/user/permissions.html#roles).  \n* Avoid granting administrative privileges unless specifically required.  \n* Enforce instance-wide default token [lifetimes](https://docs.gitlab.com/ee/administration/settings/account_and_limit_settings.html#set-a-lifetime-1).  
\n* Regularly review and audit token permissions to ensure they align with current operational needs.  \n* Revoke tokens once the task is complete.\n\n### Service accounts\n\n[Service accounts](https://docs.gitlab.com/ee/user/profile/service_accounts.html) ensure tokens are tied to non-human entities, separating them from individual user accounts and reducing dependency on specific users. Instead of using personal accounts to generate tokens for automation, create service accounts with limited scopes. Benefits include:\n\n* Usage of service account tokens in CI/CD pipelines to avoid disruptions caused by user account changes  \n* Programmatically automate rotation processes, as personal accounts remain unaffected  \n* Clearer monitoring and auditing trail of actions taken by service accounts  \n* Service accounts with [no expiration](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html#create-a-service-account-personal-access-token-with-no-expiry-date) date  \n* Does not consume [a license seat](https://docs.gitlab.com/user/profile/service_accounts/#create-a-service-account)\n\nGitLab plans to release a new [Service Accounts UI](https://gitlab.com/groups/gitlab-org/-/epics/9965) as a counterpart to its [API-based creation](https://docs.gitlab.com/ee/api/user_service_accounts.html#create-a-service-account-user), designed to simplify the management of service accounts and their associated tokens. Check out the demo below on the programmatic usage of service accounts.\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/oZvjg0SCsqY?si=cj-0LjfeonLGXv9u\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n### Vulnerability tools\n\nLeverage GitLab’s built-in security tools to identify and mitigate vulnerabilities associated with token usage. 
For maximum coverage, it is recommended to use them all in tandem.\n\n* [Secret Detection](https://docs.gitlab.com/ee/user/application_security/secret_detection/): Scans your repository for hardcoded secrets like API tokens, passwords, and other sensitive information. View the [list of detected secrets](https://docs.gitlab.com/ee/user/application_security/secret_detection/detected_secrets.html).  \n* [Static Application Security Testing (SAST)](https://docs.gitlab.com/ee/user/application_security/sast/): Analyzes your source code for security vulnerabilities and [provides reports with UI findings in merge requests](https://docs.gitlab.com/ee/user/application_security/sast/#features), among other features.  \n* [Dependency Scanning](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/): Ensures that third-party libraries used in your project do not expose token-related vulnerabilities. \n\n### Audit logs and monitoring\n\nMaintain token health by regularly reviewing audit logs and token usage, instance- and/or group-wide.\n\n* [Audit events](https://docs.gitlab.com/ee/user/compliance/audit_events.html): Enable audit event logging in GitLab to track token-related activities such as creation, usage, deletion and unusual API calls (unpermitted parameters in logs, and consistent triggers of the rate limiter). \n* [IP allowlisting](https://docs.gitlab.com/ee/administration/reporting/ip_addr_restrictions.html#configure-ip-address-restrictions): Helps prevent malicious users from hiding their activities behind multiple IP addresses.  \n* [Alerts](https://docs.gitlab.com/ee/operations/incident_management/alerts.html): Set up alerts for unusual activities (trigger paging for on-call rotations or be used to create incidents).  \n* [Credentials inventory](https://docs.gitlab.com/ee/administration/credentials_inventory.html): Complete control of all available access tokens with the ability to revoke as needed.  
\n* [Notifications](https://docs.gitlab.com/ee/user/profile/notifications.html): Proactively handle any token (group, project, and personal) expiration notification emails you receive. Based on customer demand, this feature was recently extended to include 30-day and 60-day notifications from the seven-day default.   \n* [Webhooks](https://docs.gitlab.com/ee/user/project/integrations/webhooks.html#create-a-webhook): Access token webhooks can be configured on groups and projects to send seven-day token expiry events. This feature was also recently extended to include 30-day and 60-day notifications behind the **:extended_expiry_webhook_execution_setting** feature flag (disabled by default).\n\n## What's next\n\nWith GitLab’s large token catalog, there are ongoing [plans](https://gitlab.com/gitlab-org/gitlab/-/issues/502630) for consolidation with a focus on the lifetime, fine-grained scopes, consistent management, and usage. Our current prioritized token-related features include a complete UI for service accounts, additional credential types in the credentials inventory, and improved auditing for tokens and service accounts.\n\n> Sign up for a [free trial of GitLab Ultimate](https://about.gitlab.com/free-trial/) to start using token management.",[719,693,9,695,717],{"slug":3792,"featured":91,"template":698},"the-ultimate-guide-to-token-management-at-gitlab","content:en-us:blog:the-ultimate-guide-to-token-management-at-gitlab.yml","The Ultimate Guide To Token Management At 
Gitlab","en-us/blog/the-ultimate-guide-to-token-management-at-gitlab.yml","en-us/blog/the-ultimate-guide-to-token-management-at-gitlab",{"_path":3798,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3799,"content":3804,"config":3809,"_id":3811,"_type":14,"title":3812,"_source":16,"_file":3813,"_stem":3814,"_extension":19},"/en-us/blog/there-is-no-mlops-without-devsecops",{"title":3800,"description":3801,"ogTitle":3800,"ogDescription":3801,"noIndex":6,"ogImage":1167,"ogUrl":3802,"ogSiteName":685,"ogType":686,"canonicalUrls":3802,"schema":3803},"Building GitLab with GitLab: Why there is no MLOps without DevSecOps","Follow along as data scientists adopt DevSecOps practices and enjoy the benefits of automation, repeatable workflows, standardization, and automatic provisioning of infrastructure.","https://about.gitlab.com/blog/there-is-no-mlops-without-devsecops","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Building GitLab with GitLab: Why there is no MLOps without DevSecOps\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"William Arias\"}],\n        \"datePublished\": \"2023-10-05\",\n      }",{"title":3800,"description":3801,"authors":3805,"heroImage":1167,"date":3806,"body":3807,"category":801,"tags":3808},[3135],"2023-10-05","Building predictive models requires a good amount of experimentation and\niterations. Data scientists building those models usually implement\nworkflows involving several steps such as data loading, processing,\ntraining, testing, and deployment. 
Such workflows or data science pipelines\ncome with a set of challenges on their own; some of these common challenges\nare:\n\n- prone to error due to manual steps\n\n- experimentation results that are hard to replicate\n\n- long training time of machine learning (ML) models \n\n\nWhen there is a challenge, there is also an opportunity; in this case, those\nchallenges represent an opportunity for data scientists to adopt DevSecOps\npractices and enjoy the benefits of automation, repeatable workflows,\nstandardization, and automatic provisioning of infrastructure needed for\ndata-driven applications at scale.\n\n\nThe [Data Science team at\nGitLab](https://about.gitlab.com/handbook/business-technology/data-team/organization/data-science/)\nis now utilizing the GitLab DevSecOps Platform in their workflows,\nspecifically to:\n\n- enhance experiment reproducibility by ensuring code and data execute in a\nstandardized container image\n\n- automate training and re-training of ML models with GPU-enabled CI/CD\n\n- leverage ML experiment tracking, storing the most relevant metadata and\nartifacts produced by data science pipelines automated with CI\n\n\nAt GitLab, we are proponents of \"dogfooding\" our platform and sharing how we\nuse GitLab to build GitLab. What follows is a detailed look at the Data\nScience team's experience.\n\n\n### Enhancing experiment reproducibility \n\nA baseline step to enhance reproducibility is having a common and standard\nexperiment environment for all data scientists to run experiments in their\nJupyter Notebooks. A standard data science environment ensures that all team\nmembers use the same software dependencies. A way to achieve this is by\nbuilding a container image with all the respective dependencies under\nversion control and re-pulling it every time a new version of the code is\nrun. 
This process is illustrated in the figure below:\n\n\n![build](https://about.gitlab.com/images/blogimages/2023-10-04-there-is-no-mlops-without-devsecops/build-2.png)\n\nData science image of automatic build using GitLab CI \n\n{: .note.text-center}\n\n\nYou might wonder if the image gets built every time there is a new commit.\nThe answer is \"no\" since that would result in longer execution times, and\nthe image dependencies versions don’t change frequently, rendering it\nunnecessary to build it every time there is a new commit. Therefore, once\nthe standard image is automatically built by the pipeline, it is pushed to\nthe GitLab Container Registry, where it is stored and ready to be pulled\nevery time changes to the model code are introduced, and re-training is\nnecessary.\n\n\n![registry](https://about.gitlab.com/images/blogimages/2023-10-04-there-is-no-mlops-without-devsecops/registry.png)\n\nGitLab Container Registry with image automatically built and pushed by a CI\npipeline\n\n{: .note.text-center}\n\n\nChanges to the image dependencies or Dockerfile require a [merge\nrequest](https://docs.gitlab.com/ee/user/project/merge_requests/) and an\napproval process.\n\n\n### How to build the data science image using GitLab CI/CD\n\nConsider this project structure:\n\n\n```\n\nnotebooks/\n\n.gitlab-ci.yml\n\nDockerfile\n\nconfig.yml\n\nrequirements.txt\n\n```\n\nGitLab's Data Science team already had a pre-configured JupyterLab image\nwith packages such as [gitlabds](https://pypi.org/project/gitlabds/1.0.0/)\nfor common data preparation tasks and modules to enable Snowflake\nconnectivity for loading raw data. All these dependencies are reflected in\nthe Dockerfile at the root of the project, plus all the steps necessary to\nbuild the image: \n\n\n```\n\nFROM nvcr.io/nvidia/cuda:12.1.1-base-ubuntu22.04\n\nCOPY .    
/app/\n\nWORKDIR /app\n\nRUN apt-get update\n\nRUN apt-get install -y python3.9\n\nRUN apt-get install -y python3-pip\n\nRUN pip install -r requirements.txt\n\n```\n\n\nThe instructions to build the data science image start with using Ubuntu\nwith CUDA drivers as a base image. We are using this baseline image because,\nmoving forward, we will use GPU hardware to train models. The rest of the\nsteps include installing Python 3.9 and the dependencies listed in\n`requirements.txt` with their respective versions. \n\n\nAutomatically building the data science image using [GitLab\nCI/CD](https://about.gitlab.com/topics/ci-cd/) requires us to create the\n`.gitlab-ci.yml ` at the root of the project and use it to describe the jobs\nwe want to automate. For the time being, let’s focus only on the\n`build-ds-image`job:\n\n\n```\n\nvariables:\n  DOCKER_HOST: tcp://docker:2375\n  MOUNT_POINT: \"/builds/$CI_PROJECT_PATH/mnt\"\n  CONTAINER_IMAGE: \"$CI_REGISTRY_IMAGE/main-image:latest\"\n\nstages:\n    - build\n    - train\n    - notify\ninclude:\n  - template: 'Workflows/MergeRequest-Pipelines.gitlab-ci.yml'\nworkflow:\n  rules:\n    - if: $CI_PIPELINE_SOURCE == \"merge_request_event\"\n    - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS\n      when: never\n\nbuild-ds-image:\n  tags: [ saas-linux-large-amd64 ]\n  stage: build\n  services:\n    - docker:20.10.16-dind\n  image:\n    name: docker:20.10.16\n  script:\n    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY\n    - docker build -t $CONTAINER_IMAGE .\n    - docker push $CONTAINER_IMAGE\n  rules:\n    - if: '$CI_PIPELINE_SOURCE == \"merge_request_event\" && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH'\n      changes:\n       - Dockerfile\n       - requirements.txt\n\n  allow_failure: true\n```\n\n\nAt a high level, the job `build-ds-image`:\n\n- uses a docker-in-docker service (dind) necessary to create docker images\nin GitLab CI/CD.\n\n- uses [predefined variables](link) to log 
into the GitLab Container\nRegistry, build the image, tag it using $CONTAINER_IMAGE variable, and push\nit to the registry. These steps are declared in the script section lines.\n\n- leverages a  `rules` section to evaluate conditions to determine if the\njob should be created. In this case, this job runs only if there are changes\nto the Dockerfile and requirements.txt file and if those changes are created\nusing a merge request.\n\n\nThe conditions declared in `rules` helps us optimize the pipeline running\ntime since the image gets rebuilt only when necessary.\n\n\nA complete pipeline can be found in this example project, along with\ninstructions to trigger the automatic creation of the data science image:\n[Data Science CI\npipeline](https://gitlab.com/gitlab-data/data-science-ci-example/-/blob/main/.gitlab-ci.yml?ref_type=heads).\n\n\n### Automate training and re-training of ML models with GPU-enabled CI/CD\n\nGitLab offers the ability to leverage GPU hardware and, even better, to get\nthis hardware automatically provisioned to run jobs declared in the\n.gitlab-ci.yml file. We took advantage of this capability to train our ML\nmodels faster without spending time setting up or configuring graphics card\ndrivers. 
Using GPU hardware ([GitLab\nRunners](https://docs.gitlab.com/ee/ci/runners/saas/gpu_saas_runner.html))\nrequires us to add this line to the training job: \n\n\n```\n\ntags:\n        - saas-linux-medium-amd64-gpu-standard\n```\n\n\nThe tag above will ensure that a GPU GitLab Runner automatically picks up\nevery training job.\n\nLet’s take a look at the entire training job in the .gitlab-ci.yml file and\nbreak down what it does:\n\n\n```\n\ntrain-commit-activated:\n    stage: train\n    image: $CONTAINER_IMAGE\n    tags:\n        - saas-linux-medium-amd64-gpu-standard\n    script:\n        - echo \"GPU training activated by commit message\"\n        - echo \"message passed is $CI_COMMIT_MESSAGE\"\n        - notebookName=$(echo ${CI_COMMIT_MESSAGE/train})\n        - echo \"Notebook name $notebookName\"\n        - papermill -p is_local_development False -p tree_method 'gpu_hist' $notebookName -\n    rules:\n        - if: '$CI_COMMIT_BRANCH == \"staging\"'\n          when: never\n        - if: $CI_COMMIT_MESSAGE =~ /\\w+\\.ipynb/\n          when: always\n          allow_failure: true\n    artifacts:\n      paths:\n        - ./model_metrics.md\n````\n\n\nLet’s start with this block:\n\n\n```\n\ntrain-commit-activated:\n    stage: train\n    image: $CONTAINER_IMAGE\n    tags:\n        - saas-linux-medium-amd64-gpu-standard\n```\n\n\n- **train-commit-activated** This is the name of the job. Since the model\ntraining gets activated given a specific pattern in the commit message, we\nuse a descriptive name to easily identify it in the larger pipeline.\n\n- **stage: train** This specifies the pipeline stage where this job belongs.\nIn the first part of the CI/CD configuration, we defined three stages for\nthis pipeline: `build`, `train`,  and `notify`. This job comes after\nbuilding the data science container image. 
The order is essential since we\nfirst need the image built to run our training code in it.\n\n- **image: $CONTAINER_IMAGE** Here, we specify the Docker image built in the\nfirst job that contains the CUDA drivers and necessary Python dependencies\nto run this job. $CONTAINER_IMAGE is a user-defined variable specified in\nthe variables section of the .gitlab-ci.yml file. \n\n- **tags: saas-linux-medium-amd64-gpu-standard** As mentioned earlier, using\nthis line, we ask GitLab to automatically provision a GPU-enabled Runner to\nexecute this job.\n\n\nThe second block of the job:\n\n\n```\n\nscript:\n        - echo \"GPU training activated by commit message\"\n        - echo \"message passed is $CI_COMMIT_MESSAGE\"\n        - notebookName=$(echo ${CI_COMMIT_MESSAGE/train})\n        - echo \"Notebook name $notebookName\"\n        - papermill -p is_local_development False -p tree_method 'gpu_hist' $notebookName -\n```\n\n\n- **script** This section contains the commands in charge of running the\nmodel training. The execution of this job is conditioned to the contents of\nthe  commit message. The commit message must have the name of the Jupyter\nNotebook that contains the actual model training code.\n\n\nThe rationale behind this approach is that we wanted to keep the data\nscientist workflow as simple as possible. The team had already adopted the\n[modeling\ntemplates](https://gitlab.com/gitlab-data/data-science/-/tree/main/templates)\nto start building predictive models quickly. Plugging the CI pipeline into\ntheir modeling workflow was a priority to ensure productivity would remain\nintact. 
With these steps:\n\n\n```\n\nnotebookName=$(echo ${CI_COMMIT_MESSAGE/train})\n        - echo \"Notebook name $notebookName\"\n        - papermill -p is_local_development False -p tree_method 'gpu_hist' $notebookName -\n```\n\n\nThe CI pipeline captures the name of the Jupyter Notebook with the training\nmodeling template and passes parameters to ensure\n[XGBoost](https://xgboost.readthedocs.io/en/stable/) uses the provisioned\nGPU. You can find an example of the Jupyter modeling template that is\nexecuted in this job\n[here](https://gitlab.com/gitlab-data/data-science-ci-example/-/blob/main/notebooks/training_example.ipynb?ref_type=heads).\n\n\nOnce the data science image is built, it can be reutilized in further model\ntraining jobs. The `train-commit-activated` job pulls the image from the\nGitLab Container Registry and utilizes it to run the ML pipeline defined in\nthe training notebook. This is illustrated in the `CI Job - Train model` in\nthe figure below:\n\n\n![training](https://about.gitlab.com/images/blogimages/2023-10-04-there-is-no-mlops-without-devsecops/training_job.png)\n\nTraining job executes ML pipeline defined in the modeling notebook\n\n{: .note.text-center}\n\n\nSince our image contains CUDA drivers and GitLab automatically provisions\nGPU-enabled hardware, the training job runs significantly faster with\nrespect to standard hardware.\n\n\n### Using GitLab ML experiment tracker\n\nEach model training execution triggered using GitLab CI is an experiment\nthat needs tracking. 
Using Experiment tracking in GitLab helps us to record\nmetadata that comes in handy to compare model performance and collaborate\nwith other data scientists by making result experiments available for\neveryone and providing a detailed history of the model development.\n\n\n![experiments](https://about.gitlab.com/images/blogimages/2023-10-04-there-is-no-mlops-without-devsecops/experiments.png)\n\nExperiments automatically logged on every CI pipeline GPU training run \n\n{: .note.text-center}\n\n\nEach model artifact created can be traced back to the pipeline that\ngenerated it, along with its dependencies:\n\n\n![traceability](https://about.gitlab.com/images/blogimages/2023-10-04-there-is-no-mlops-without-devsecops/traceability_small.png)\n\nModel traceability from pipeline run to candidate details\n\n{: .note.text-center}\n\n\n### Putting it all together\n\nWhat is machine learning without data to learn from? We also leveraged the\n[Snowflake](https://www.snowflake.com/en/) connector in the model training\nnotebook and automated the data extraction whenever the respective commit\ntriggers a training job. 
Here is an architecture of the current solution\nwith all the parts described in this blog post:\n\n\n![process](https://about.gitlab.com/images/blogimages/2023-10-04-there-is-no-mlops-without-devsecops/training_fixed.png)\n\nData Science pipelines automated using GitLab DevSecops Platform\n\n{: .note.text-center}\n\n\n| Challenge | Solution |\n\n| ------ | ------ | \n\n|Prone to error due to manual steps | Automate steps with [GitLab\nCI/CD](https://docs.gitlab.com/ee/ci/) |\n\n|Experimentation results that are hard to replicate    |  Record metadata\nand model artifacts with [GitLab Experiment\nTracker](https://docs.gitlab.com/ee/user/project/ml/experiment_tracking/)   \n|\n\n|The long training time of machine learning models     |  Train models with\n[GitLab SaaS GPU\nRunners](https://docs.gitlab.com/ee/ci/runners/saas/gpu_saas_runner.html)  |\n\n\nIterating on these challenges is a first step towards MLOps, and we are at\nthe tip of the iceberg; in coming iterations, we will adopt security\nfeatures to ensure model provenance (software bill of materials) and code\nquality, and to monitor our ML workflow development with value stream\ndashboards. 
But so far, one thing is sure: **There is no MLOps without\nDevSecOps**.\n\n\nGet started automating your data science pipelines, follow this\n[tutorial](https://about.gitlab.com/handbook/business-technology/data-team/platform/ci-for-ds-pipelines/)\nand clone this\n[data-science-project](https://gitlab.com/gitlab-data/data-science-ci-example)\nto follow along and watch this demo of using GPU Runners to train\n[XGBoost](https://xgboost.readthedocs.io/en/stable/) model.\n\n\nSee how data scientists can train ML models with GitLab GPU-enabled Runners\n(XGBoost 5-minute demo):\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/tElegG4NCZ0?si=L1IZfx_UGv6u81Gk\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n## More \"Building GitLab with GitLab\" blogs\n\nRead more of our \"Building GitLab with GitLab\" series:\n\n- [How we use Web API fuzz\ntesting](https://about.gitlab.com/blog/building-gitlab-with-gitlab-api-fuzzing-workflow/)\n\n- [How GitLab.com inspired GitLab\nDedicated](https://about.gitlab.com/blog/building-gitlab-with-gitlabcom-how-gitlab-inspired-dedicated/)\n",[719,951,9,803],{"slug":3810,"featured":6,"template":698},"there-is-no-mlops-without-devsecops","content:en-us:blog:there-is-no-mlops-without-devsecops.yml","There Is No Mlops Without Devsecops","en-us/blog/there-is-no-mlops-without-devsecops.yml","en-us/blog/there-is-no-mlops-without-devsecops",{"_path":3816,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3817,"content":3822,"config":3827,"_id":3829,"_type":14,"title":3830,"_source":16,"_file":3831,"_stem":3832,"_extension":19},"/en-us/blog/tips-for-a-successful-jira-to-gitlab-migration",{"title":3818,"description":3819,"ogTitle":3818,"ogDescription":3819,"noIndex":6,"ogImage":1715,"ogUrl":3820,"ogSiteName":685,"ogType":686,"canonicalUrls":3820,"schema":3821},"Tips for a successful Jira to GitLab migration","If 
you are considering a migration to GitLab for Agile planning, this blog is for you! Learn some tips to help in your transition.","https://about.gitlab.com/blog/tips-for-a-successful-jira-to-gitlab-migration","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Tips for a successful Jira to GitLab migration\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Melissa Ushakov\"}],\n        \"datePublished\": \"2023-10-24\",\n      }",{"title":3818,"description":3819,"authors":3823,"heroImage":1715,"date":3824,"body":3825,"category":1053,"tags":3826},[1720],"2023-10-24","\n_Atlassian is ending support for all Server products in February 2024. [Learn more about the benefits of migrating from Atlassian to GitLab here](https://about.gitlab.com/move-to-gitlab-from-atlassian/)._\n\nWith the [end of life of Jira Server approaching](https://about.gitlab.com/blog/atlassian-server-ending-move-to-a-single-devsecops-platform/), many organizations are considering a migration from Jira to GitLab for Agile planning to streamline organizational processes. Moving your team to a new Agile planning tool can seem daunting, but this change is worth the effort. GitLab empowers your teams to collaborate seamlessly in a single DevSecOps platform, leading to increased delivery of user value. We have helped many customers of all sizes and Agile maturities transition to GitLab and are ready to help. In this blog, you'll find practical tips to help you in your journey migrating to GitLab for Agile planning.\n\n## Tips for your migration\n\n### Examine your Jira setup\n\nTake the time to understand your current Jira setup — consider this an opportunity to [simplify your configuration](https://about.gitlab.com/blog/five-reasons-to-simplify-agile-planning-tool-configuration-gitlab/). 
Moving to a new Agile planning tool allows you to rethink processes and find ways to empower your teams to deliver software faster.\n\n### Start with an integration\n\nThe [GitLab Jira integration](https://docs.gitlab.com/ee/integration/jira/configure.html) allows you to see your Jira issues in GitLab. That way, you can retain Jira as your source of truth for Agile planning while you migrate other DevSecOps processes into GitLab. Your engineers will be able to stay within GitLab for their day-to-day work, and Product, Project, and Design team members can explore GitLab before transitioning. Easing your way into a new tool gives your users time to adjust to a new way of working.\n\n![Issue View in Jira](https://about.gitlab.com/images/blogimages/atlassianjira/issueview.png){: .shadow.medium.center}\nIssue View in Jira\n{: .note.text-center}\n\n### Plan your GitLab setup\n\nFocusing on planning up front will save you time down the road and prevent unnecessary change management. GitLab has several key features that can greatly improve your experience and allow you to get the best value from your investment, including:\n- [Groups and projects](https://docs.gitlab.com/ee/user/group/): Jira doesn't allow you to create a hierarchy for containers of Agile planning work items. Groups and projects in GitLab unlock portfolio \"team of teams\" planning, permissions management, and data rollups. Consider how you want to roll up data and assign permissions when creating a group and project structure. We recommend creating a group to represent your team where you can manage epics, labels, and boards. Child projects under your team's groups can house your repositories and related issues.\n- [Labels](https://docs.gitlab.com/ee/user/project/labels.html): GitLab offers many ways to customize your experience based on labels. We recommend creating a set of scoped labels to represent statuses. 
We also often see users create scoped labels for work categorization, such as `bug`, `enhancement`, or `maintentance`. If you had a custom field for a value in Jira or you want to be able to filter or pull analytics by a specific attribute, just create a label for it in GitLab.\n- [Milestones and iterations](https://docs.gitlab.com/ee/user/project/milestones/): Use milestones to longer-running planning, such as Releases or Program Increments in the Scaled Agile Framework® (SAFe®). This is typically a Fix Version in Jira. Iterations represent team sprints. In GitLab, you can view a burnup or burndown chart for both milestones and iterations.\n\n![Group and project example](https://about.gitlab.com/images/blogimages/2023-09-28-tips-for-a-successful-jira-to-gitlab-migration/group_projects_setup.png){: .shadow.medium.center}\nUse groups and projects to model your organizational hierarchy\n{: .note.text-center}\n\n### Determine the scope of your data migration\n\nMigrating to a new tool is an opportunity to do some backlog clean up! When deciding what data to migrate, focus on what you'll need to successfully continue your team's work. Bringing large volumes of historical data to a new tool will increase the complexity and effort to migrate, and many times offers limited value. Historical data can remain in your Jira instance, or you can archive it so that it's accessible without migrating it into GitLab. Once you've narrowed down your dataset, use [the Jira issue importer](https://docs.gitlab.com/ee/user/project/import/jira.html) to migrate data into GitLab. 
Our [REST](https://docs.gitlab.com/ee/api/rest/index.html) and [GraphQL](https://docs.gitlab.com/ee/api/graphql/index.html) APIs are available for those that want to build a more customized migration experience.\n\n![Jira importer](https://about.gitlab.com/images/blogimages/2023-09-28-tips-for-a-successful-jira-to-gitlab-migration/import_issues_from_jira.png){: .shadow.small.center}\nUsing GitLab Jira importer, you can import your Jira issues to GitLab.com or to your self-managed GitLab instance\n{: .note.text-center}\n\n### Plan your migration timeline\n\nIn a typical organization, there will be different levels of proccess complexity and maturity within the teams migrating to GitLab. Run a pilot with a team that has a simple setup and is willing to provide feedback. This will allow you to smooth out potential problems, improve your configuration, and document best practices. We recommend planning for time in your schedule to iterate on your configuration.\n\n### Document and share best practices\n\nKnowledge sharing is an important part of improving your process. Provide teams that are onboarding the opportunity to give feedback and share best practices. You can use [GitLab wikis](https://docs.gitlab.com/ee/user/project/wiki/) to serve as a central source of knowledge for your Agile planning process and to document best practices.\n\n\n### Monitor progress and improvements\n\nAfter migrating to GitLab, your teams will unlock new analytics capabilties that allow you to track the flow of value across a value stream. As a result, you'll have the data you need to celebrate successes and learn from missteps. 
With [Value Stream Analytics](https://docs.gitlab.com/ee/user/group/value_stream_analytics/) and the [Value Stream Dashboard](https://docs.gitlab.com/ee/user/analytics/value_streams_dashboard.html), you can monitor the positive effect of moving Agile planning workflows from a siloed tool to an integrated DevSecOps platform.\n\n![Value Stream Analytics gives you an overview of the time spent at each stage of your DevSecOps workflow](https://about.gitlab.com/images/blogimages/2023-05-07-vsa-overview.gif){: .shadow.medium.center}\nValue Stream Analytics gives you an overview of the time spent at each stage of your DevSecOps workflow\n{: .note.text-center}\n\n\n## Getting started\n\nBy following the tips outlined in this blog, you can help your migration be more efficient and maximize the potential of GitLab for your Agile needs. You can get started today with the [the Jira issue importer](https://docs.gitlab.com/ee/user/project/import/jira.html). Once you've onboarded into GitLab, you can scale your Agile processes further with [portfolio]( https://about.gitlab.com/solutions/agile-delivery/) and [value stream management](https://about.gitlab.com/solutions/value-stream-management/).\n\nHave questions about migrating from Jira to GitLab? 
[We're here to help](https://about.gitlab.com/sales/).\n",[908,9],{"slug":3828,"featured":6,"template":698},"tips-for-a-successful-jira-to-gitlab-migration","content:en-us:blog:tips-for-a-successful-jira-to-gitlab-migration.yml","Tips For A Successful Jira To Gitlab Migration","en-us/blog/tips-for-a-successful-jira-to-gitlab-migration.yml","en-us/blog/tips-for-a-successful-jira-to-gitlab-migration",{"_path":3834,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3835,"content":3840,"config":3844,"_id":3846,"_type":14,"title":3847,"_source":16,"_file":3848,"_stem":3849,"_extension":19},"/en-us/blog/top-10-gitlab-technical-blogs-of-2023",{"title":3836,"description":3837,"ogTitle":3836,"ogDescription":3837,"noIndex":6,"ogImage":772,"ogUrl":3838,"ogSiteName":685,"ogType":686,"canonicalUrls":3838,"schema":3839},"Top 10 GitLab technical blogs of 2023","2023 was a big year! Catch up on expert insights into DevSecOps, AI, CI/CD, and more.","https://about.gitlab.com/blog/top-10-gitlab-technical-blogs-of-2023","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Top 10 GitLab technical blogs of 2023\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Sandra Gittlen\"}],\n        \"datePublished\": \"2024-01-09\",\n      }",{"title":3836,"description":3837,"authors":3841,"heroImage":772,"date":2413,"body":3842,"category":1277,"tags":3843},[1295],"2023 brought fresh insights from experts across GitLab and beyond —  all of them focused on the challenges and opportunities facing DevSecOps teams. From Lockheed Martin to CARFAX, organizations are trying to understand and unlock the power of technologies such as artificial intelligence (AI), CI/CD, security automation, and more. Our experts provided tips, best practices, and tutorials to use throughout the software development lifecycle.\n\nHere are the top 10 technical blogs from what was an incredible year in DevSecOps innovation.\n\n**1. 
[Jenkins to GitLab: The ultimate guide to modernizing your CI/CD environment](https://about.gitlab.com/blog/jenkins-gitlab-ultimate-guide-to-modernizing-cicd-environment/)**\nLooking for a smooth transition from Jenkins to GitLab? Follow this step-by-step tutorial to learn how GitLab's integrated CI/CD capabilities help deliver high-quality software faster.\n\n**2. [U.S. Navy Black Pearl: Lessons in championing DevSecOps](https://about.gitlab.com/blog/u-s-navy-black-pearl-lessons-in-championing-devsecops/)**\nSigma Defense's director of engineering details what it's like to manage the U.S. Navy's Black Pearl, which uses GitLab as its DevSecOps platform. The DevSecOps champion relays his experience implementing DevSecOps and the benefits of that decision.\n\n**3. [Quickstart guide for GitLab Remote Development workspaces](https://about.gitlab.com/blog/quick-start-guide-for-gitlab-workspaces/)**\nEnabling developers to work in their preferred environments empowers DevSecOps teams to build and deliver software more efficiently. With these quickstart instructions, developers can create a workspace, use the Web IDE Terminal to install dependencies or start their server, and view their running application.\n\n**4. [Introducing the GitLab CI/CD Catalog Beta](https://about.gitlab.com/blog/introducing-the-gitlab-ci-cd-catalog-beta/)**\nCI/CD catalogs are a game-changer, allowing developers to discover, integrate, and share pre-existing CI/CD components with ease. This tutorial shows how to get the most from this new DevSecOps platform feature.\n\n**5. [Combine GitLab Flow and GitLab Duo for a workflow powerhouse](https://about.gitlab.com/blog/gitlab-flow-duo/)**\nGitLab Flow and GitLab Duo can help organizations achieve significant improvements in end-to-end workflow efficiency that can lead to higher levels of productivity, deployment frequency, code quality and overall security, and production resiliency and availability. Find out how with this step-by-step guide.\n\n**6. 
[Efficient DevSecOps workflows: Hands-on python-gitlab API automation](https://about.gitlab.com/blog/efficient-devsecops-workflows-hands-on-python-gitlab-api-automation/)**\nThe python-gitlab library is a useful abstraction layer for the GitLab API. Dive into hands-on examples and best practices in this tutorial.\n\n**7. [Building GitLab with GitLab: Why there is no MLOps without DevSecOps](https://about.gitlab.com/blog/there-is-no-mlops-without-devsecops/)**\nAt GitLab, we believe in the power of MLOps, especially when combined with DevSecOps. So follow along as our data scientists adopt DevSecOps practices and enjoy the benefits of automation, repeatable workflows, standardization, and automatic provisioning of infrastructure.\n\n**8. [Explore the Dragon Realm: Build a C++ adventure game with a little help from AI](https://about.gitlab.com/blog/building-a-text-adventure-using-cplusplus-and-code-suggestions/)**\nReaders are invited to create a mystical world while learning how to integrate AI into their coding environment. This tutorial demonstrates how to use GitLab Duo Code Suggestions to create a text-based adventure game, including magical locations to visit and items to procure, using C++. \n\n**9. [How GitLab's Red Team automates C2 testing](https://about.gitlab.com/blog/how-gitlabs-red-team-automates-c2-testing/)**\nThe GitLab Red Team conducts security exercises that simulate real-world threats. They apply professional development practices to using the same open source C2 tools as threat actors. In this tutorial, the GitLab Red Team shares how they implement continuous testing for the Mythic framework, their design philosophy, and a public project that can be forked for use by other Red Teams.\n\n**10. 
[Building GitLab with GitLab: How GitLab.com inspired Dedicated](https://about.gitlab.com/blog/building-gitlab-with-gitlabcom-how-gitlab-inspired-dedicated/)**\nThe design of GitLab Dedicated, our single-tenancy SaaS version of the DevSecOps platform, came from the lessons learned while building GitLab.com. In this peek behind the curtains, learn the considerations that sparked different decisions regarding automation, databases, monitoring, availability, and more – and what the outcome was.\n\nSign up for the GitLab newsletter using the form to the right to receive the latest blogs right in your inbox.\n",[803,109,9,951,693,719],{"slug":3845,"featured":91,"template":698},"top-10-gitlab-technical-blogs-of-2023","content:en-us:blog:top-10-gitlab-technical-blogs-of-2023.yml","Top 10 Gitlab Technical Blogs Of 2023","en-us/blog/top-10-gitlab-technical-blogs-of-2023.yml","en-us/blog/top-10-gitlab-technical-blogs-of-2023",{"_path":3851,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3852,"content":3858,"config":3863,"_id":3865,"_type":14,"title":3866,"_source":16,"_file":3867,"_stem":3868,"_extension":19},"/en-us/blog/top-10-gitlab-workflow-hacks-you-need-to-know",{"title":3853,"description":3854,"ogTitle":3853,"ogDescription":3854,"noIndex":6,"ogImage":3855,"ogUrl":3856,"ogSiteName":685,"ogType":686,"canonicalUrls":3856,"schema":3857},"Top 10 GitLab workflow hacks you need to know","A GitLab product manager shares her favorite tricks to navigate quickly and efficiently around the GitLab DevSecOps Platform and to boost team collaboration.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099361/Blog/Hero%20Images/Blog/Hero%20Images/lightvisibility_lightvisibility.png_1750099361252.png","https://about.gitlab.com/blog/top-10-gitlab-workflow-hacks-you-need-to-know","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Top 10 GitLab workflow hacks you need to know\",\n        
\"author\": [{\"@type\":\"Person\",\"name\":\"Amanda Rueda\"}],\n        \"datePublished\": \"2024-04-09\",\n      }",{"title":3853,"description":3854,"authors":3859,"heroImage":3855,"date":3860,"body":3861,"category":1277,"tags":3862},[1050],"2024-04-09","In the world of software development, efficiency isn't just about moving fast – it's about smart navigation. As a GitLab product manager, I truly understand the value of efficiency when working within the DevSecOps platform. These are my top 10 favorite GitLab features and they might be the workflow hacks you never knew you needed.\n\nLet's dive into these hidden gems to unlock a new level of productivity and collaboration within your team.\n\n## 1. Resolve comments\n\nNot just for merge requests! Resolving comments on issues can significantly reduce noise and streamline task management. It's particularly handy for managing feedback efficiently.\n\n> **Why do I love it?** Not only does resolving comments reduce the noise on an issue, but it’s also a great way to manage tasks.\n>\n> **Use case.** Resolving comments is a great tool for issues where you are collecting feedback – respond to the feedback and provide a link, resolve the comment, and move on to the next one.\n>\n> __[How-to documentation](https://docs.gitlab.com/ee/user/discussions/#resolve-a-thread)__\n\n![example of resolve comments - image 1](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099376/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750099376147.gif)\n\n\u003Cp>\u003C/p>\n\n## 2. Internal comments\n\nSpeak directly to your team without an external audience. Keep discussions private within an issue or merge request with comments visible only to your team members. 
It's the perfect balance between transparency and privacy.\n\n> **Why do I love it?** It balances privacy with transparency, while keeping the broader discussion open for the community.\n>\n> **Use case.** When coordinating a product launch, your marketing team can use internal comments to discuss and refine messaging and strategy. This keeps your discussions centralized and easily accessible to the team while in draft mode.\n>\n> **[How-to documentation](https://docs.gitlab.com/ee/user/discussions/#add-an-internal-note)**\n\n![internal comments example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099376/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750099376148.png)\n\n\u003Cp>\u003C/p>\n\n## 3. And/or in filters\n\nWhen searching records on a listing page, using and/or filters can help you slice through the noise and find exactly what you're looking for quickly and efficiently.\n\n> **Why do I love it?** Perfect for finding exactly what you need, powering efficient and streamlined workflows.\n>\n>**Use case.** Search for feature issues related to a specific initiative that are assigned to specific groups.\n>\n> __[How-to documentation](https://docs.gitlab.com/ee/user/project/issues/managing_issues.html#filter-with-the-or-operator)__\n\n![and/or filter example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099376/Blog/Content%20Images/Blog/Content%20Images/and_or__1__aHR0cHM6_1750099376152.gif)\n\n\u003Cp>\u003C/p>\n\n## 4. Auto expand URLs\n\nAppending '+' or '+s' to the end of a GitLab URL transforms it into an informative snippet, allowing you to share progress without forcing your teammates to leave the page.\n\n> **Why do I love it?** It's like having x-ray vision for URLs – see the important stuff without even clicking!\n>\n> **Use case.** Sharing progress in comments? 
Just add '+s' to the link, and boom – everyone's instantly on the same page.\n>\n> __[How-to documentation](https://docs.gitlab.com/ee/user/markdown.html#show-the-issue-merge-request-or-epic-title-in-the-reference)__\n\n![auto expand URLs example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099376/Blog/Content%20Images/Blog/Content%20Images/image7_aHR0cHM6_1750099376154.gif)\n\n\u003Cp>\u003C/p>\n\n## 5. Quick actions\n\nWith simple text commands, quick actions let you perform tasks like assigning users, adding labels, and more, directly from the description or comment box, saving you clicks and time.\n\n> **Why do I love it?** Saves clicks and time.\n>\n> **Use case.** When creating a new issue I use quick actions to automatically add labels, a milestone, and connect to the epic upon saving the record.\n>\n> __[How-to documentation](https://docs.gitlab.com/ee/user/project/quick_actions.html)__\n\n![quick actions example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099376/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750099376156.gif)\n\n\u003Cp>\u003C/p>\n\n## 6. Bulk edit\n\nApply labels, change assignees, or update milestones for multiple issues at once. This feature turns potentially tedious updates into a breeze, allowing for quick adjustments across numerous issues.\n\n> **Why do I love it?** Because it turns tedious updates into quick updates!\n>\n> **Use case.** Need to tag the whole sprint's issues as Review needed? Just filter, select all, and add that label in bulk – easy peasy.\n>\n> __[How-to documentation](https://docs.gitlab.com/ee/user/project/issues/managing_issues.html#bulk-edit-issues-from-a-project)__\n\n![bulk edit example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099376/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750099376157.gif)\n\n\u003Cp>\u003C/p>\n\n## 7. Epic swimlanes\n\nGroup issues under epics on your board to visually track and discuss progress. 
It's a powerful way to contextualize work during reviews or standups.\n\n> **Why do I love it?** Easily understand the context of work as you’re walking the board.\n>\n> **Use case.** Group by epic during standup reviews to easily piece together work with its parent initiative.\n>\n> __[How-to documentation](https://docs.gitlab.com/ee/user/project/issue_board.html#group-issues-in-swimlanes)__\n\n![epic swimlanes example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099376/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750099376158.gif)\n\n\u003Cp>\u003C/p>\n\n## 8. Wiki diagrams\n\nIllustrate ideas and workflows directly in your wiki pages with easy-to-create diagrams. This feature supports visual learning and simplifies complex concepts.\n\n> **Why do I love it?** It’s incredibly user-friendly and flexible.\n>\n> **Use case.** When outlining a new feature workflow, draw it directly in the wiki page, making it crystal clear for everyone on the team.\n>\n> __[How-to documentation](https://docs.gitlab.com/ee/administration/integration/diagrams_net.html)__\n\n![wiki diagrams example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099376/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750099376159.gif)\n\n\u003Cp>\u003C/p>\n\n## 9. Table creation\n\nForget about wrestling with markdown for table creation. The rich text editor lets you effortlessly insert and format tables, making documentation cleaner and more structured.\n\n> **Why do I love it?** It turns the table creation ordeal into a breeze, making updates clean and structured with just a few clicks.\n>\n> **Use case.** Compiling a sprint retro? 
Quickly insert a table to organize feedback, action items, and owners, making the review process smoother for everyone.\n>\n> __[How-to documentation](https://docs.gitlab.com/ee/user/rich_text_editor.html#tables)__\n\n![table creation example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099376/Blog/Content%20Images/Blog/Content%20Images/image8_aHR0cHM6_1750099376160.gif)\n\n\u003Cp>\u003C/p>\n\n## 10. Video and GIF embeds\n\nEnhance your issues and epic descriptions or comments with embedded GIFs and YouTube videos, adding a dynamic layer to your communication.\n\n> **Why do I love it?** Sometimes a GIF or video speaks better than words.\n>\n> **Use case.** Trying to explain a UI bug? Embed a YouTube video for a quick walkthrough of the proposed feature enhancement.\n\n![video and gif embed example](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099376/Blog/Content%20Images/Blog/Content%20Images/gif__1__aHR0cHM6_1750099376161.gif)\n\n\u003Cp>\u003C/p>\n\n## Explore these features\n\nThese features represent just the tip of the iceberg in GitLab's comprehensive toolkit designed to boost efficiency and foster better collaboration. While they may be underutilized, their impact on your workflow could be substantial. I encourage you to explore these features further and integrate them into your daily routines.\n\n> Are you excited to power your DevSecOps workflow using GitLab? 
[Try GitLab Ultimate for free for 30 days](https://gitlab.com/-/trial_registrations/new).\n",[719,9,695,888],{"slug":3864,"featured":6,"template":698},"top-10-gitlab-workflow-hacks-you-need-to-know","content:en-us:blog:top-10-gitlab-workflow-hacks-you-need-to-know.yml","Top 10 Gitlab Workflow Hacks You Need To Know","en-us/blog/top-10-gitlab-workflow-hacks-you-need-to-know.yml","en-us/blog/top-10-gitlab-workflow-hacks-you-need-to-know",{"_path":3870,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3871,"content":3876,"config":3882,"_id":3884,"_type":14,"title":3885,"_source":16,"_file":3886,"_stem":3887,"_extension":19},"/en-us/blog/track-machine-learning-model-experiments",{"title":3872,"description":3873,"ogTitle":3872,"ogDescription":3873,"noIndex":6,"ogImage":3130,"ogUrl":3874,"ogSiteName":685,"ogType":686,"canonicalUrls":3874,"schema":3875},"Track ML model experiments with new GitLab MLFlow integration","Track the many versions of your machine learning models on GitLab using the MLFlow client.","https://about.gitlab.com/blog/track-machine-learning-model-experiments","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Track ML model experiments with new GitLab MLFlow integration\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Eduardo Bonet\"}],\n        \"datePublished\": \"2023-05-11\",\n      }",{"title":3872,"description":3873,"authors":3877,"heroImage":3130,"date":3879,"body":3880,"category":801,"tags":3881},[3878],"Eduardo Bonet","2023-05-11","\n\n\u003Ci>This blog is the latest post in an ongoing series about GitLab’s journey to \u003Ca href=\"/blog/ai-ml-in-devsecops-series/\">build and integrate AI/ML into our DevSecOps platform\u003C/a>. The first blog post can be found \u003Ca href=\"/blog/what-the-ml-ai/\">here\u003C/a>. 
Throughout the series, we’ll feature blogs from our product, engineering, and UX teams to showcase how we’re infusing AI/ML into GitLab.\u003C/i>\n\nThe GitLab DevSecOps platform now features [Machine Learning Model Experiments](https://docs.gitlab.com/ee/user/project/ml/experiment_tracking/), which is available to all GitLab users, making GitLab a powerful tool for creating ML models. Organizations can now track the many versions of their ML models within the GitLab user interface, using the open source [MLFlow](https://github.com/mlflow/mlflow).\n\n\u003Cimg src=\"/images/blogimages/2023-05-11-gitlab-model-experiments/experiment.png\" alt=\"Model experiment\" style=\"border: 1px solid gray;\">\n\n## What is an ML model?\n\nAn ML model is the result of three components: code to extract the patterns from the data, the data where the \npatterns are extracted from, and the configuration used for both, often called \"hyperparameters\". Any change to any of these components can \nlead to changes in the model performance, and keeping track of all of these parts and the results can be challenging. \nExperiment tracking aims to make sense of this confusion by keeping a record of all of the variations created, \nalong with the artifacts and results of each trial.\n\n[MLFlow](https://github.com/mlflow/mlflow) is a popular open source solution for ML experiment tracking, \nproviding a client to log different model versions and their metadata. However, it puts the cost of deployment and managing \nits server onto the users.\n\nGitLab makes the tracking process easier not by deploying a managed MLFlow backend, but by \u003Ci>being an MLFlow backend itself\u003C/i>. This marries the best of both worlds: Data scientists don't need to learn yet another client as their code requires minimal to no changes, while GitLab provides everything else. 
There is no need to manage a server or to implement user management, so there is no need to configure your artifact storage –  this is all provided by the GitLab DevSecOps platform.\n\n## ML model experiment features in GitLab 16.0\n\nWatch this overview of the available features in 16.0:\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/uxweU4zT40c\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n- **Create experiments and candidates using the MLFlow client**: Simply point the MLFlow client to your GitLab project and experiments and runs will be recorded on GitLab, with no additional setup necessary and no need to create a server. Note that MLFlow runs are called \"candidates\" in GitLab, as each of them is a candidate to become a version of a model.\n\n- **User access management**: Experiments are tied to a GitLab project, making it easy to control which users have access to which models. \n\n- **Manage candidates directly on the GitLab UI**: Search and explore your logged experiments on GitLab, using the UI you already know.\n\n- **Download candidate data as a CSV**: Data scientists that want to explore or create reports on an experiment can download the necessary data as a CSV file.\n\nTo get started, refer to the [documentation](https://docs.gitlab.com/ee/user/project/ml/experiment_tracking/#machine-learning-model-experiments).\n\n### More to come\n\nGitLab wants to help you manage the entire lifecycle of your machine learning model from creation to packaging, deployment, and monitoring. 
\nFor more information on what we are working on, keep an eye on the MLOps Incubation Engineering [handbook page](/handbook/engineering/incubation/mlops/) and on our [YouTube playlist](https://www.youtube.com/playlist?list=PL05JrBw4t0KpC6-JQy8lY4tNAZKXBaM_-).\n\nMachine Learning Model Experiments is an experimental feature available to all GitLab tiers, and we are looking for feedback so please [comment in this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/381660).\n\nContinue reading our \"[AI/ML in DevSecOps](/blog/ai-ml-in-devsecops-series/)\" series.\n\n_Disclaimer: This blog contains information related to upcoming products, features, and functionality. It is important to note that the information in this blog post is for informational purposes only. Please do not rely on this information for purchasing or planning purposes. As with all projects, the items mentioned in this blog and linked pages are subject to change or delay. The development, release, and timing of any products, features, or functionality remain at the sole discretion of GitLab._\n",[803,9,233,695],{"slug":3883,"featured":6,"template":698},"track-machine-learning-model-experiments","content:en-us:blog:track-machine-learning-model-experiments.yml","Track Machine Learning Model Experiments","en-us/blog/track-machine-learning-model-experiments.yml","en-us/blog/track-machine-learning-model-experiments",{"_path":3889,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3890,"content":3896,"config":3901,"_id":3903,"_type":14,"title":3904,"_source":16,"_file":3905,"_stem":3906,"_extension":19},"/en-us/blog/transform-code-quality-and-compliance-with-automated-processes",{"title":3891,"description":3892,"ogTitle":3891,"ogDescription":3892,"noIndex":6,"ogImage":3893,"ogUrl":3894,"ogSiteName":685,"ogType":686,"canonicalUrls":3894,"schema":3895},"Transform code quality and compliance with automated processes","Learn how GitLab Premium features address the technical debt and security vulnerability 
challenges that plague traditional approaches.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749660151/Blog/Hero%20Images/blog-image-template-1800x945__26_.png","https://about.gitlab.com/blog/transform-code-quality-and-compliance-with-automated-processes","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Transform code quality and compliance with automated processes\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Jessica Hurwitz\"}],\n        \"datePublished\": \"2024-12-13\",\n      }",{"title":3891,"description":3892,"authors":3897,"heroImage":3893,"date":3898,"body":3899,"category":717,"tags":3900},[843],"2024-12-13","While manual code review processes may suffice for a small team, as DevSecOps teams scale, the processes create significant bottlenecks that impede software development velocity and quality. Often slow, inconsistent, and frequently failing to catch critical vulnerabilities, the manual approach leads to technical debt and increased security risks.\n\nTo mitigate risks and drive innovation, organizations must prioritize automated code quality and compliance systems. The financial implications of poor code management are substantial, with technical debt consuming up to 40% of IT budgets ([McKinsey Digital: Tech Debt Report](https://www.mckinsey.com/capabilities/mckinsey-digital/our-insights/tech-debt-reclaiming-tech-equity)) and software vulnerabilities costing an average of $4.88 million per security breach ([IBM Cost of a Data Breach Report](https://www.ibm.com/reports/data-breach)). \n\nModern software development requires a strategic approach to code management and compliance that goes beyond traditional review processes. 
With more robust review systems and compliance controls, organizations can innovate and secure software faster than their competitors.\n\n## The power of code review and approval processes\n\nAccording to the [GitLab 2024 Global DevSecOps Report](https://about.gitlab.com/developer-survey/), C-level executives rank code quality as one of the top benefits of DevSecOps. With executives recognizing code quality as a strategic priority, systematic review processes have emerged as a cornerstone of modern development practices. \n\n[Code review](https://about.gitlab.com/topics/version-control/what-is-code-review/) processes benefit developers through knowledge sharing, the discovery of bugs earlier in the process, and improved security. However, developers say the top changes that could be made to improve job satisfaction are increasing automation and collaboration, according to our survey.\n\nAs code quality and code review processes are embedded into the software development lifecycle, focusing on systems that remove manual code review and enhance collaboration across teams will help keep developer workflows running smoothly. \n\n### Code review processes increase collaboration and development speed\n\nThe improvement in organizational efficiency can be seen in this example with [Airbus Intelligence](https://about.gitlab.com/customers/airbus/), a leader in the geospatial industry. The development teams at Airbus struggled with inefficient processes and needed tools that could help their team collaborate efficiently across the globe. After adopting GitLab Premium, Airbus quickly noticed the improvement in code quality. \n\nGitLab CI’s built-in security testing meant developers could identify bugs and vulnerabilities before they reached production. Instead of spending a full day setting up for production and doing manual tests, those simple tasks are now automated. \n\nAirbus’ release time dramatically decreased from 24 hours to just 10 minutes. 
\n\n“What used to happen is we would touch one part of the code and it would break another part. Now, each time a developer pushes code, we can immediately identify problems,” said Logan Weber, Software Automation Engineer at Airbus Defense and Space, Intelligence.\n\n### Features that enable higher code quality\n\nPowerful GitLab Premium features like [Multiple Approvers for Merge Requests](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/rules.html), [Code Quality](https://docs.gitlab.com/ee/ci/testing/code_quality.html) checks integration with third-party code quality solutions, and [Protected Branches](https://docs.gitlab.com/ee/user/project/repository/branches/protected.html), enable companies to innovate faster than their competitors. \n\nBy reducing review cycle times while strengthening code integrity and compliance, DevSecOps teams address both the technical debt and security vulnerability challenges that plague traditional approaches. These security benefits help teams like Airbus Intelligence develop faster, more secure solutions.  \n\n## Why enhanced compliance controls matter\n\nThe implementation of effective code compliance strategies is constantly evolving due to [changing regulations](https://about.gitlab.com/blog/meet-regulatory-standards-with-gitlab/), and keeping up with these regulations is a challenge for most companies. \n\nBy developing code compliance strategies and automated control mechanisms, companies ensure that quality and compliance policies are met. \n\nFor Airbus Intelligence, security and vulnerability scans built into integration testing enabled teams to catch security and compliance issues earlier in the process.\n\n[Continuous integration](https://about.gitlab.com/topics/ci-cd/#what-is-continuous-integration-ci) gives teams visibility into more projects and allows all team members to manage deployments. Expanded access controls improve cross-team collaboration and accountability. 
\n\n### Features that increase accountability \n\nGitLab Premium's [advanced compliance controls](https://about.gitlab.com/solutions/security-compliance/) create an unbroken chain of accountability throughout the development process, enabling organizations to systematically track and validate every code change.\n\nUsers have greater auditability of any change and can track commits. This is in addition to strict [access controls](https://docs.gitlab.com/ee/administration/settings/visibility_and_access_controls.html) that provide specific people with the ability to push and merge changes. With [audit logs](https://docs.gitlab.com/ee/user/compliance/audit_event_types.html), users can track and review changes and activities within the repository.\n\n## Ship software faster with GitLab Premium\n\n“It’s simple. All teams operate around this one tool. Instantly, that made communication easier. We wouldn’t be where we are today if we didn’t have GitLab in our stack,” according to Airbus' Weber.\n\nGitLab Premium represents more than just a tool — it's a comprehensive approach to software engineering that empowers development teams to deliver high-quality, secure, and efficient software solutions. 
\n\n> #### Discover why [customers are upgrading to GitLab Premium](https://about.gitlab.com/pricing/premium/why-upgrade/).",[1542,804,9,717,693,695],{"slug":3902,"featured":6,"template":698},"transform-code-quality-and-compliance-with-automated-processes","content:en-us:blog:transform-code-quality-and-compliance-with-automated-processes.yml","Transform Code Quality And Compliance With Automated Processes","en-us/blog/transform-code-quality-and-compliance-with-automated-processes.yml","en-us/blog/transform-code-quality-and-compliance-with-automated-processes",{"_path":3908,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3909,"content":3915,"config":3920,"_id":3922,"_type":14,"title":3923,"_source":16,"_file":3924,"_stem":3925,"_extension":19},"/en-us/blog/tutorial-advanced-use-case-for-gitlab-pipeline-execution-policies",{"title":3910,"description":3911,"ogTitle":3910,"ogDescription":3911,"noIndex":6,"ogImage":3912,"ogUrl":3913,"ogSiteName":685,"ogType":686,"canonicalUrls":3913,"schema":3914},"Tutorial: Advanced use case for GitLab Pipeline Execution Policies","Learn how new GitLab Ultimate functionality can enforce a standardized pipeline across an organization for improved compliance.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098083/Blog/Hero%20Images/Blog/Hero%20Images/AdobeStock_397632156_3Ldy1urjMStQCl4qnOBvE0_1750098083312.jpg","https://about.gitlab.com/blog/tutorial-advanced-use-case-for-gitlab-pipeline-execution-policies","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Tutorial: Advanced use case for GitLab Pipeline Execution Policies\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Dan Rabinovitz\"}],\n        \"datePublished\": \"2025-01-22\",\n      }",{"title":3910,"description":3911,"authors":3916,"heroImage":3912,"date":2471,"body":3918,"category":693,"tags":3919},[3917],"Dan Rabinovitz","[Pipeline 
execution\npolicies](https://docs.gitlab.com/ee/user/application_security/policies/pipeline_execution_policies.html)\nare a newer addition to the GitLab DevSecOps platform and a powerful\nmechanism to enforce CI/CD jobs across applicable projects. They enable\nplatform engineering or security teams to inject jobs into developers’ YAML\npipeline definition files, guaranteeing that certain CI/CD jobs will execute\nno matter what a developer defines in their \\`.gitlab-ci.yml\\` file. \n\n\nThis article will explain how to utilize pipeline execution policies to\ncreate guardrails around the stages or jobs that a developer can use in\ntheir pipeline definition. In regulated environments, this may be necessary\nto ensure developers adhere to a standard set of jobs or stages in their\nGitLab pipeline. Any job or stage that a developer adds to their pipeline\nthat does not adhere to a corporate standard will cause the pipeline to\nfail. \n\n\nOne example use case for pipeline execution policies is ensuring a security\nscanner job runs. Let’s say an organization has made an investment in a\nthird-party security scanner and they have a requirement that the external\nscan runs before any merge is made into the main branch. Without a pipeline\nexecution policy, a developer could easily skip this step by not including\nthe required code in their `.gitlab-ci.yml` file.  With a pipeline execution\npolicy in place, a security team can guarantee the external security\nscanning job executes regardless of how a developer defines their pipeline.\n\n\nTo use pipeline execution policies to enforce these restrictions requires\ntwo parts: a shell script to make calls to the GitLab API and the policy\nitself. 
This tutorial uses a bash script; if your runner uses a different\nscripting language, it is easy to adapt to other languages.\n\n\nHere is the example shell script I will use for this exercise:\n\n\n``` \n\n#!/bin/bash\n\n\necho \"Checking pipeline stages and jobs...\"\n\n\n# Pull the group access token from the environment variable\n\nGROUP_ACCESS_TOKEN=\"$PIPELINE_TOKEN\"\n\n\necho \"PROJECT_ID: $PROJECT_ID\"\n\necho \"PIPELINE_ID: $PIPELINE_ID\"\n\n\nif [ -z \"$GROUP_ACCESS_TOKEN\" ]; then  \n  echo \"GROUP_ACCESS_TOKEN (MR_GENERATOR) is not set\"\n  exit 1\nfi\n\n\nif [ -z \"$PROJECT_ID\" ]; then\n  echo \"PROJECT_ID is not set\"\n  exit 1\nfi\n\n\nif [ -z \"$PIPELINE_ID\" ]; then\n  echo \"PIPELINE_ID is not set\"\n  exit 1\nfi\n\n\n# Use the group access token for the API request\n\napi_url=\"$GITLAB_API_URL/projects/$PROJECT_ID/pipelines/$PIPELINE_ID/jobs\"\n\necho \"API URL: $api_url\"\n\n\n# Fetch pipeline jobs using the group access token\n\njobs=$(curl --silent --header \"PRIVATE-TOKEN: $GROUP_ACCESS_TOKEN\"\n\"$api_url\")\n\necho \"Fetched Jobs: $jobs\"\n\n\nif [[ \"$jobs\" == *\"404 Project Not Found\"* ]]; then\n  echo \"Failed to authenticate with GitLab API: Project not found\"\n  exit 1\nfi\n\n\n# Extract stages and jobs\n\npipeline_stages=$(echo \"$jobs\" | grep -o '\"stage\":\"[^\"]*\"' | cut -d '\"' -f 4\n| sort | uniq | tr '\\n' ',')\n\npipeline_jobs=$(echo \"$jobs\" | grep -o '\"name\":\"[^\"]*\"' | cut -d '\"' -f 4 |\nsort | uniq | tr '\\n' ',')\n\n\necho \"Pipeline Stages: $pipeline_stages\"  \n\necho \"Pipeline Jobs: $pipeline_jobs\"\n\n\n# Check if pipeline stages are approved\n\nfor stage in $(echo $pipeline_stages | tr ',' ' '); do \n  echo \"Checking stage: $stage\"\n  if ! [[ \",$APPROVED_STAGES,\" =~ \",$stage,\" ]]; then\n    echo \"Stage $stage is not approved.\"\n    exit 1\n  fi\ndone\n\n\n# Check if pipeline jobs are approved \n\nfor job in $(echo $pipeline_jobs | tr ',' ' '); do\n  echo \"Checking job: $job\"\n  if ! 
[[ \",$APPROVED_JOBS,\" =~ \",$job,\" ]]; then\n    echo \"Job $job is not approve\n```\n\n\nLet’s break this down a bit. \n\n\nThe first few lines of this code perform some sanity checks, ensuring that a\npipeline ID, project ID, and group access token exist.\n\n\n* A GitLab pipeline ID is a unique numerical identifier that GitLab\nautomatically assigns to each pipeline run.\n\n* A GitLab project ID is a unique numerical identifier assigned to each\nproject in GitLab.\n\n* A GitLab group access token is a token that authenticates and authorizes\naccess to resources at the group level in GitLab. This is in contrast to a\nGitLab personal access token (PAT), which is unique to each user.  \n\n\nThe bulk of the work comes from the [GitLab Projects\nAPI](https://docs.gitlab.com/ee/api/projects.html) call where the script\nrequests the jobs for the specified pipeline. Once you have job information\nfor the currently running pipeline, you can use a simple grep command to\nparse out stage and job names, and store them in variables for comparison.\nThe last portion of the script checks to see if pipeline stages and jobs are\non the approved list. Where do these parameters come from?\n\n\nThis is where [GitLab Pipeline Execution\nPolicies](https://docs.gitlab.com/ee/user/application_security/policies/pipeline_execution_policies.html)\ncome into play. They enable injection of YAML code into a pipeline. How can\nwe leverage injected YAML to execute this shell script?  
Here’s a code\nsnippet showing how to do this.\n\n\n```\n\n## With this config, the goal is to create a pre-check job that evaluates\nthe pipeline and fails the job/pipeline if any checks do not pass\n\n\nvariables:\n  GITLAB_API_URL: \"https://gitlab.com/api/v4\"\n  PROJECT_ID: $CI_PROJECT_ID\n  PIPELINE_ID: $CI_PIPELINE_ID\n  APPROVED_STAGES: \".pipeline-policy-pre,pre_check,build,test,deploy\"\n  APPROVED_JOBS: \"pre_check,build_job,test_job,deploy_job\"\n\npre_check:\n  stage: .pipeline-policy-pre\n  script:\n    - curl -H \"PRIVATE-TOKEN:${REPO_ACCESS_TOKEN}\" --url \"https://\u003Cgitlab_URL>/api/v4/projects/\u003Cproject_id>/repository/files/check_settings.sh/raw\" -o pre-check.sh\n    - ls -l\n    - chmod +x pre-check.sh\n    - DEBUG_MODE=false ./pre-check.sh  # Set DEBUG_MODE to true or false\n  allow_failure: true\n```\n\n\nIn this YAML snippet, we set a few variables used in the shell script. Most\nimportantly, this is where approved stages and approved jobs are defined.\nAfter the `variables` section, we then add a new job to the\n`.pipeline-policy-pre` stage. This is a reserved stage for pipeline\nexecution policies and is guaranteed to execute before any stages defined in\na `.gitlab-ci.yml` file.  There is a corresponding `.pipeline-policy-post`\nstage as well, though we will not be using it in this scenario.  \n\n\nThe script portion of the job does the actual work. Here, we leverage a curl\ncommand to execute the shell script defined above. This example includes\nauthentication if it’s located in a private repository. However, if it’s\npublicly accessible, you can forgo this authentication. The last line\ncontrols whether or not the pipeline will fail. In this example, the\npipeline will continue. This is useful for testing – in practice, you would\nlikely set `allow_failure: false` to cause the pipeline to fail. 
This is\ndesired as the goal of this exercise is to not allow pipelines to continue\nexecution if a developer adds a rogue job or stage.\n\n\nTo utilize this YAML, save it to a `.yml` file in a repository of your\nchoice. We’ll see how to connect it to a policy shortly.\n\n\nNow, we have our script and our YAML to inject into a developer’s pipeline.\nNext, let’s see how to put this together using a pipeline execution policy.\n\n\nLike creating other policies in GitLab, start by creating a new Pipeline\nExecution Policy by navigating to **Secure > Policies** in the left hand\nnavigation menu. Then, choose **New Policy** at the top right, and select\n**Pipeline Execution Policy** from the policy creation options.  \n\n\nFor this exercise, you can leave the **Policy Scope** set to the default\noptions. In the **Actions** section, be sure to choose **Inject** and select\nthe project and file where you’ve saved your YAML code snippet. Click on\n**Update via Merge Request** at the very bottom to create an MR that you can\nthen merge into your project.\n\n\nIf this is your first security policy, clicking on **Merge** in the MR will\ncreate a [Security Policy\nProject](https://docs.gitlab.com/ee/user/application_security/policies/vulnerability_management_policy.html),\nwhich is a project to store all security policies. When implementing any\ntype of security policy in a production environment, [access to this project\nshould be restricted](https://docs.gitlab.com/ee/user/project/members/) so\ndevelopers cannot make changes to security policies. In fact, you may also\nwant to consider storing YAML code that’s used by pipeline execution\npolicies in this project to restrict access as well, though this is not a\nrequirement.  
\n\nExecuting a pipeline where this pipeline execution policy is enabled should\nresult in the following output when you attempt to add an invalid stage to\nthe project `.gitlab-ci.yml` file.\n\n\n![Output of attempting an invalid stage to project gitlab-ci.yml\nfile](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098102/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098102394.png)\n\n\nWhile this use case is very focused on one aspect of security and compliance\nin your organization, this opens the door to other use cases. For example,\nyou may want to make group-level variables accessible to every project\nwithin a group; this is possible with pipeline execution policies. Or, you\nmay want to create a golden pipeline and have developers add to it. The\npossibilities are endless. GitLab customers are finding new and exciting\nways to use this new functionality every day.\n\n\nIf you’re a GitLab Ultimate customer, try this out today and let us know how\nyou’re using pipeline execution policies. 
Not a GitLab Ultimate customer?\n[Sign up for a free\ntrial](https://about.gitlab.com/free-trial/devsecops/) to get started.\n\n\n## Read more\n\n- [How to integrate custom security scanners into\nGitLab](https://about.gitlab.com/blog/how-to-integrate-custom-security-scanners-into-gitlab/)\n\n- [Integrate external security scanners into your DevSecOps\nworkflow](https://about.gitlab.com/blog/integrate-external-security-scanners-into-your-devsecops-workflow/)\n\n- [Why GitLab is deprecating compliance pipelines in favor of security\npolicies](https://about.gitlab.com/blog/why-gitlab-is-deprecating-compliance-pipelines-in-favor-of-security-policies/)\n",[693,719,186,9,109,695],{"slug":3921,"featured":6,"template":698},"tutorial-advanced-use-case-for-gitlab-pipeline-execution-policies","content:en-us:blog:tutorial-advanced-use-case-for-gitlab-pipeline-execution-policies.yml","Tutorial Advanced Use Case For Gitlab Pipeline Execution Policies","en-us/blog/tutorial-advanced-use-case-for-gitlab-pipeline-execution-policies.yml","en-us/blog/tutorial-advanced-use-case-for-gitlab-pipeline-execution-policies",{"_path":3927,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3928,"content":3934,"config":3940,"_id":3942,"_type":14,"title":3943,"_source":16,"_file":3944,"_stem":3945,"_extension":19},"/en-us/blog/tutorial-migrate-from-google-cloud-source-repositories-to-gitlab",{"title":3929,"description":3930,"ogTitle":3929,"ogDescription":3930,"noIndex":6,"ogImage":3931,"ogUrl":3932,"ogSiteName":685,"ogType":686,"canonicalUrls":3932,"schema":3933},"Tutorial: Migrate from Google Cloud Source Repositories to GitLab","Google Cloud is deprecating Cloud Source Repositories. 
Learn how to migrate a CSR source code repository to GitLab, along with best practices.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097739/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%2813%29_1zdtbfPDHZVe6JC2AbdHmb_1750097738370.png","https://about.gitlab.com/blog/tutorial-migrate-from-google-cloud-source-repositories-to-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Tutorial: Migrate from Google Cloud Source Repositories to GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Tsukasa Komatsubara\"},{\"@type\":\"Person\",\"name\":\"Regnard Raquedan\"}],\n        \"datePublished\": \"2024-08-28\",\n      }",{"title":3929,"description":3930,"authors":3935,"heroImage":3931,"date":3937,"body":3938,"category":717,"tags":3939},[3936,1964],"Tsukasa Komatsubara","2024-08-28","Google Cloud’s [deprecation of Cloud Source\nRepositories](https://cloud.google.com/source-repositories/docs/release-notes)\n(CSR) has prompted development teams to seek a full-featured alternative for\ntheir source code repositories. GitLab, a [Google Cloud Technology\nPartner](https://cloud.google.com/find-a-partner/partner/gitlab-inc), is a\nstrong choice due to its comprehensive DevSecOps capabilities.\n\n\nIn this tutorial, you'll learn the steps to ensure a smooth transition from\nCSR to GitLab, whether you're using GitLab.com or a self-managed instance on\nGoogle Cloud.\n\n\n## Why GitLab?\n\nTransitioning from Google Cloud Source Repositories to GitLab is a\nrecommended step. As a strategic partner of Google Cloud, GitLab seamlessly\nintegrates with existing infrastructure with ease and brings value to\ncustomers in the following ways:\n\n- **Unified DevSecOps platform**\n    - Consolidate your entire development lifecycle into a single application, from planning to monitoring. 
Eliminate tool sprawl and dramatically boost productivity.\n- **Seamless Google Cloud integration**\n    - Effortlessly connect with GKE, Cloud Build, and Cloud Storage, ensuring a smooth migration and efficient operations within the Google Cloud ecosystem.\n- **Advanced CI/CD capabilities**\n    - Leverage [Auto DevOps](https://docs.gitlab.com/ee/topics/autodevops/) to automate everything from security scanning to deployment, accelerating your development cycles.\n- **Industry-recognized AI coding assistance**\n    - Benefit from built-in AI-assisted development with [GitLab Duo](https://about.gitlab.com/gitlab-duo/), fostering a secure and efficient coding environment.\n\n## Prerequisites\n\n\nBefore you start the migration, ensure you have:\n\n- GitLab account: Set up your account on GitLab.com or on a self-hosted\ninstance.\n\n- GitLab project: Create a blank project in GitLab where the CSR repository\nwill be migrated.\n\n\n## Migration steps\n\n\n1. Create a blank GitLab project: This will serve as the destination for\nyour migrated CSR repository. Keep this project empty for now.\n\n2. Generate a personal access token (PAT): Navigate to GitLab settings and\n[generate a\nPAT](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html)\nwith `read_repository` and `write_repository` scopes enabled. This token\nwill be used to authenticate your Git operations during the migration\nprocess.\n\n3. Edit code in Cloud Shell Editor: From your CSR repository, open the Cloud\nShell Editor by clicking the “Edit code” button. You’ll need to authorize\nthe Cloud Shell and select “Trust repo” to proceed.\n\n\n![Google Cloud Shell\nEditor](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097750/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097750517.png)\n\n\n4. 
Inspect Git status: Run `git status` in the Cloud Shell to check the\ncurrent branch and ensure everything is in order before pushing to GitLab.\n\n\n![Inspect Git\nstatus](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097750/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097750518.png)\n\n\n5. Set Up the Remote Repository: Add your GitLab project as a remote\nrepository by running:\n\n\n```\n\ngit remote add origin [GITLAB_PROJECT_URL]\n\n\n```\n\n\n6. Replace `[GITLAB_PROJECT_URL]` with the actual URL of your GitLab\nproject.\n\nPush to GitLab: Finally, push your local repository to GitLab by running: \n\n\n```\n\ngit push -u origin [BRANCH_NAME]\n\n\n```\n\n\n7. Replace `[BRANCH_NAME]` with the current branch name you noted earlier.\n\nWhen prompted, use your GitLab username and the PAT as the password to\nauthenticate and complete the push.\n\n\n## Best practices\n\n\n- Back up before you begin: Always back up your CSR repository before\nstarting the migration process.\n\n- Test after migration: Ensure all aspects of the repository, including\nbranches and CI/CD pipelines, are functioning as expected in GitLab.\n\n- Leverage GitLab features: Take advantage of GitLab’s advanced DevSecOps\nfeatures such as [AI](https://about.gitlab.com/gitlab-duo/),\n[CI/CD](https://docs.gitlab.com/ee/ci/), and [Enterprise Agile\nplanning](https://about.gitlab.com/solutions/agile-delivery/) to enhance\nyour development workflow.\n\n\nMoving from Google Cloud Source Repositories to GitLab is easy and offers\nmore benefits than just managing source code. 
GitLab, with its integration\nwith Google Cloud, makes it an ideal choice for developers seeking to\nenhance their workflow post-migration.\n\n\n> Read more about [GitLab's integration with Google\nCloud](https://about.gitlab.com/blog/gitlab-google-cloud-integrations-now-in-public-beta/).\n",[719,1968,9],{"slug":3941,"featured":6,"template":698},"tutorial-migrate-from-google-cloud-source-repositories-to-gitlab","content:en-us:blog:tutorial-migrate-from-google-cloud-source-repositories-to-gitlab.yml","Tutorial Migrate From Google Cloud Source Repositories To Gitlab","en-us/blog/tutorial-migrate-from-google-cloud-source-repositories-to-gitlab.yml","en-us/blog/tutorial-migrate-from-google-cloud-source-repositories-to-gitlab",{"_path":3947,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3948,"content":3953,"config":3957,"_id":3959,"_type":14,"title":3960,"_source":16,"_file":3961,"_stem":3962,"_extension":19},"/en-us/blog/tutorial-secure-and-optimize-your-maven-repository-in-gitlab",{"title":3949,"description":3950,"ogTitle":3949,"ogDescription":3950,"noIndex":6,"ogImage":3111,"ogUrl":3951,"ogSiteName":685,"ogType":686,"canonicalUrls":3951,"schema":3952},"Tutorial: Secure and optimize your Maven Repository in GitLab","Learn the best practices, advanced techniques, and upcoming features that improve the efficiency of your DevSecOps workflow.","https://about.gitlab.com/blog/tutorial-secure-and-optimize-your-maven-repository-in-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Tutorial: Secure and optimize your Maven Repository in GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Tim Rizzi\"}],\n        \"datePublished\": \"2025-05-22\",\n      }",{"title":3949,"description":3950,"authors":3954,"heroImage":3111,"date":2844,"body":3955,"category":693,"tags":3956},[925],"As a GitLab product manager, I'm excited to share insights on securing and optimizing your 
Maven repository. We're passionate about providing a complete DevSecOps platform, and the Maven repository is part of this ecosystem. Explore best practices, advanced techniques, and upcoming features that will transform your Maven workflow.\n\n## Securing your Maven repository: A comprehensive approach\n\nSecuring your software supply chain is more critical than ever so let's dive into strategies to fortify your Maven packages in GitLab.\n\n### Implement strong authentication\n\n**Personal access tokens:** Use PATs for fine-grained access control.\n\nFor example:\n\n```bash\nmvn deploy -s settings.xml\n```\n\nWhere `settings.xml` contains:\n\n```xml\n\u003Csettings>\n  \u003Cservers>\n    \u003Cserver>\n      \u003Cid>gitlab-maven\u003C/id>\n      \u003Cconfiguration>\n        \u003ChttpHeaders>\n          \u003Cproperty>\n            \u003Cname>Private-Token\u003C/name>\n            \u003Cvalue>${env.GITLAB_PERSONAL_TOKEN}\u003C/value>\n          \u003C/property>\n        \u003C/httpHeaders>\n      \u003C/configuration>\n    \u003C/server>\n  \u003C/servers>\n\u003C/settings>\n```\n\n**Deploy tokens:** Ideal for CI/CD pipelines. 
Generate these in your GitLab project settings and use them in your `.gitlab-ci.yml`.\n\n```yaml\ndeploy:\n  script:\n    - 'mvn deploy -s ci_settings.xml'\n  variables:\n    MAVEN_CLI_OPTS: \"-s ci_settings.xml --batch-mode\"\n    MAVEN_OPTS: \"-Dmaven.repo.local=.m2/repository\"\n  only:\n    - main\n```\n\nThe corresponding `ci_settings.xml` file:\n\n```xml\n\u003Csettings xmlns=\"http://maven.apache.org/SETTINGS/1.1.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n  xsi:schemaLocation=\"http://maven.apache.org/SETTINGS/1.1.0 http://maven.apache.org/xsd/settings-1.1.0.xsd\">\n  \u003Cservers>\n    \u003Cserver>\n      \u003Cid>gitlab-maven\u003C/id>\n      \u003Cconfiguration>\n        \u003ChttpHeaders>\n          \u003Cproperty>\n            \u003Cname>Deploy-Token\u003C/name>\n            \u003Cvalue>${env.CI_DEPLOY_PASSWORD}\u003C/value>\n          \u003C/property>\n        \u003C/httpHeaders>\n      \u003C/configuration>\n    \u003C/server>\n  \u003C/servers>\n\u003C/settings>\n```\n\nIn this setup:\n\n* The `CI_DEPLOY_PASSWORD` should be set as a CI/CD variable in your GitLab project settings containing the deploy token.\n* The `\u003Cid>` should match the repository ID in your project's `pom.xml` file.\n\n**Token rotation:** Implement a token rotation policy using GitLab's API. 
For example, you could create a scheduled pipeline that rotates tokens monthly:\n\n```yaml\nrotate_tokens:\n  script:\n    - curl --request POST \"https://gitlab.example.com/api/v4/projects/${CI_PROJECT_ID}/deploy_tokens\" --header \"PRIVATE-TOKEN: ${ADMIN_TOKEN}\" --form \"name=maven-deploy-${CI_PIPELINE_ID}\" --form \"scopes[]=read_registry\" --form \"scopes[]=write_registry\"\n  only:\n    - schedules\n```\n\n### Leverage GitLab's built-in security features\n\n**Dependency Scanning:** Enable it in your `.gitlab-ci.yml`.\n\n```yaml\ninclude:\n  - template: Security/Dependency-Scanning.gitlab-ci.yml\n\nvariables:\n  DS_JAVA_VERSION: 11\n```\n\n**Container Scanning:** If you're containerizing your Maven applications.\n\n```yaml\ninclude:\n  - template: Security/Container-Scanning.gitlab-ci.yml\n\nvariables:\n  CS_IMAGE: $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA\n```\n\n**License Compliance:** Ensure all dependencies comply with your project's licensing requirements.\n\n```yaml\ninclude:\n  - template: Security/License-Scanning.gitlab-ci.yml\n```\n\n### Secure your CI/CD pipeline\n\n* **CI/CD variables:** Store sensitive information securely.\n\n  ```yaml\n  variables:\n    MAVEN_REPO_USER: ${CI_DEPLOY_USER}\n    MAVEN_REPO_PASS: ${CI_DEPLOY_PASSWORD}\n  ```\n* **Masked variables:** Prevent exposure in job logs. 
Set these in your GitLab CI/CD settings.\n* **Protected branches and tags:** Configure these in your GitLab project settings to control who can trigger package publishing.\n\n### Implement package signing\n\n* Use the Maven GPG plugin to sign your artifacts.\n\n  ```xml\n  \u003Cplugin>\n    \u003CgroupId>org.apache.maven.plugins\u003C/groupId>\n    \u003CartifactId>maven-gpg-plugin\u003C/artifactId>\n    \u003Cversion>1.6\u003C/version>\n    \u003Cexecutions>\n      \u003Cexecution>\n        \u003Cid>sign-artifacts\u003C/id>\n        \u003Cphase>verify\u003C/phase>\n        \u003Cgoals>\n          \u003Cgoal>sign\u003C/goal>\n        \u003C/goals>\n      \u003C/execution>\n    \u003C/executions>\n  \u003C/plugin>\n  ```\n\n* Store your GPG key securely using GitLab CI/CD variables.\n\n### Control package access\n\n* Use GitLab's project and group-level package registry settings to restrict access.\n* Implement IP allowlists for network-level access control in your GitLab instance settings.\n\n## Optimize performance: Streamline your Maven workflow\n\nEfficiency is crucial when working with large projects or numerous dependencies. 
Here are advanced techniques to optimize your Maven package usage in GitLab.\n\n### Utilize dependency management\n\n* Use the `\u003CdependencyManagement>` section in your parent POM.\n\n  ```xml\n  \u003CdependencyManagement>\n    \u003Cdependencies>\n      \u003Cdependency>\n        \u003CgroupId>org.springframework.boot\u003C/groupId>\n        \u003CartifactId>spring-boot-dependencies\u003C/artifactId>\n        \u003Cversion>${spring-boot.version}\u003C/version>\n        \u003Ctype>pom\u003C/type>\n        \u003Cscope>import\u003C/scope>\n      \u003C/dependency>\n    \u003C/dependencies>\n  \u003C/dependencyManagement>\n  ```\n### Leverage multi-module projects\n\n  * Structure your project with a parent POM and multiple modules:\n\n    ```\n    my-project/\n    ├── pom.xml\n    ├── module1/\n    │   └── pom.xml\n    ├── module2/\n    │   └── pom.xml\n    └── module3/\n        └── pom.xml\n    ```\n  * Use Maven's reactor to build modules in the optimal order:\n\n    ```bash\n    mvn clean install\n    ```\n\n### Implement parallel builds\n\n* Use Maven's parallel build feature:\n\n  ```bash\n  mvn -T 4C clean install\n  ```\n\n### Optimize for CI/CD\n\n* In `.gitlab-ci.yml`, use caching to speed up builds:\n\n  ```yaml\n  cache:\n    paths:\n      - .m2/repository\n\n  build:\n    script:\n      - mvn clean package -Dmaven.repo.local=$CI_PROJECT_DIR/.m2/repository\n  ```\n* Implement incremental builds:\n\n  ```yaml\n  build:\n    script:\n      - mvn clean install -Dmaven.repo.local=$CI_PROJECT_DIR/.m2/repository -am -amd -fae\n  ```\n\n### Utilize build caching\n\n* Use the Gradle Enterprise Maven Extension for build caching:\n\n  ```xml\n  \u003Cbuild>\n    \u003Cplugins>\n      \u003Cplugin>\n        \u003CgroupId>com.gradle\u003C/groupId>\n        \u003CartifactId>gradle-enterprise-maven-plugin\u003C/artifactId>\n        \u003Cversion>1.9\u003C/version>\n        \u003Cconfiguration>\n          \u003CgradleEnterprise>\n            
\u003Cserver>https://ge.example.com\u003C/server>\n            \u003CallowUntrusted>false\u003C/allowUntrusted>\n          \u003C/gradleEnterprise>\n        \u003C/configuration>\n      \u003C/plugin>\n    \u003C/plugins>\n  \u003C/build>\n  ```\n\n## Introducing the Maven Virtual Registry beta program\n\nI'm thrilled to announce the launch of our beta program for the upcoming Maven virtual registry feature. This addition to our package ecosystem will change how you manage Maven repositories in GitLab.\n\n### Key features of Maven Virtual Registry\n\n1. **Repository aggregation:** Combine multiple Maven repositories (both internal and external) into a single virtual repository.\n2. **Smart proxy and caching:** Improve build times by caching artifacts and intelligently routing requests.\n3. **Centralized Access Control:** Enhance security by managing access to all repositories from a single point.\n\n### How it works\n\n1. **Configuration:** Configure Maven authentication in your `settings.xml`:\n\n```\n\u003Csettings>\n  \u003Cservers>\n    \u003Cserver>\n      \u003Cid>gitlab-maven\u003C/id>\n      \u003Cconfiguration>\n        \u003ChttpHeaders>\n          \u003Cproperty>\n            \u003Cname>Private-Token\u003C/name>\n            \u003Cvalue>${env.GITLAB_TOKEN}\u003C/value>\n          \u003C/property>\n        \u003C/httpHeaders>\n      \u003C/configuration>\n    \u003C/server>\n  \u003C/servers>\n\u003C/settings>\n```\n\nAuthentication options:\n\n- Personal access token: Use `Private-Token` as the name and `${env.GITLAB_TOKEN}` as the value.\n\n-  Group deploy token: Use `Deploy-Token` as the name and `${env.GITLAB_DEPLOY_TOKEN}` as the value.\n\n- Group access token: Use `Private-Token` as the name and `${env.GITLAB_ACCESS_TOKEN}` as the value.\n\n- CI job token: Use `Job-Token` as the name and `${CI_JOB_TOKEN}` as the value.\n\n- Configure the virtual registry in your `pom.xml`.\n\nOption 1: As an additional registry:\n\n```\n\u003Crepositories>\n  
\u003Crepository>\n    \u003Cid>gitlab-maven\u003C/id>\n    \u003Curl>https://gitlab.example.com/api/v4/virtual_registries/packages/maven/\u003Cvirtual registry id>\u003C/url>\n  \u003C/repository>\n\u003C/repositories>\n```\n\nOption 2: As a replacement for Maven Central (in your `settings.xml`):\n\n```\n\u003Cmirrors>\n  \u003Cmirror>\n    \u003Cid>gitlab-maven\u003C/id>\n    \u003Cname>GitLab virtual registry for Maven Central\u003C/name>\n    \u003Curl>https://gitlab.example.com/api/v4/virtual_registries/packages/maven/\u003Cvirtual registry id>\u003C/url>\n    \u003CmirrorOf>central\u003C/mirrorOf>\n  \u003C/mirror>\n\u003C/mirrors>\n```\n\n2. **Usage:** Now all your Maven operations will use the virtual repository.\n\n```\n# For personal access tokens\nexport GITLAB_TOKEN=your_personal_access_token\n\n# For group deploy tokens\nexport GITLAB_DEPLOY_TOKEN=your_deploy_token\n\n# For group access tokens\nexport GITLAB_ACCESS_TOKEN=your_access_token\n\n# Then run Maven commands normally\nmvn package\n\n```\n\n3. Benefits\n\n- Simplified dependency management\n- Improved build times\n- Enhanced security and compliance\n- Better control over third-party dependencies\n\n### Join the beta program\n\nWe're actively seeking participants for our beta program. As a beta tester, you'll have the opportunity to:\n\n* Get early access to the Maven Virtual Registry feature.\n* Provide direct feedback to our development team.\n* Shape the future of Maven package management in GitLab.\n* Participate in exclusive webinars and Q&A sessions with our product team.\n\n> To join the beta program or learn more about the Maven Virtual Registry, please visit the [GitLab Maven Virtual Registry Beta Program](https://gitlab.com/gitlab-org/gitlab/-/issues/498139) (**Note:** This is a placeholder link).\n\n## Summary\n\nAt GitLab, we're committed to providing cutting-edge tools for secure, efficient, and scalable software development. 
The Maven Virtual Registry is just one example of how we're continuously innovating to meet the evolving needs of developers and platform engineers.\n\nImplementing the security measures and optimization techniques discussed in this post and leveraging upcoming features like the Maven Virtual Registry can improve your Maven workflow within GitLab.\n\nWe're excited about the future of package management in GitLab and can't wait to see how you'll use these features to take your development process to the next level. Stay tuned for more updates and happy coding!",[693,9,695,717,888],{"slug":3958,"featured":91,"template":698},"tutorial-secure-and-optimize-your-maven-repository-in-gitlab","content:en-us:blog:tutorial-secure-and-optimize-your-maven-repository-in-gitlab.yml","Tutorial Secure And Optimize Your Maven Repository In Gitlab","en-us/blog/tutorial-secure-and-optimize-your-maven-repository-in-gitlab.yml","en-us/blog/tutorial-secure-and-optimize-your-maven-repository-in-gitlab",{"_path":3964,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3965,"content":3970,"config":3975,"_id":3977,"_type":14,"title":3978,"_source":16,"_file":3979,"_stem":3980,"_extension":19},"/en-us/blog/tutorial-secure-bigquery-data-publishing-with-gitlab",{"title":3966,"description":3967,"ogTitle":3966,"ogDescription":3967,"noIndex":6,"ogImage":708,"ogUrl":3968,"ogSiteName":685,"ogType":686,"canonicalUrls":3968,"schema":3969},"Tutorial: Secure BigQuery data publishing with GitLab ","Learn how to create repeatable, auditable, and efficient processes for automating and securing BigQuery data exports.","https://about.gitlab.com/blog/tutorial-secure-bigquery-data-publishing-with-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Tutorial: Secure BigQuery data publishing with GitLab \",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Regnard Raquedan\"}],\n        \"datePublished\": 
\"2025-03-25\",\n      }",{"title":3966,"description":3967,"authors":3971,"heroImage":708,"date":3972,"body":3973,"category":928,"tags":3974},[1964],"2025-03-25","GitLab offers a powerful solution for automating and securing\n[BigQuery](https://cloud.google.com/bigquery) data exports. This integration\ntransforms manual exports into repeatable, auditable processes that can\neliminate security vulnerabilities while saving valuable time. This tutorial\nexplains how to implement this solution so you can quickly reduce manual\noperations, permission issues, and security concerns with just a few lines\nof GitLab YAML code.\n\n\nFollow along with this step-by-step video:\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/gxXX-ItAreo?si=FijY9wMVppCW-18q\" frameborder=\"0\" allowfullscreen=\"true\">\u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n## The solution architecture\n\n\nOur solution leverages GitLab CI/CD pipelines to automate the secure export\nof data from BigQuery to Google Cloud Storage. Here's the high-level\narchitecture:\n\n\n1. SQL code is stored and version-controlled in GitLab.  \n\n2. After code review and approval, GitLab CI/CD pipeline executes the\ncode.  \n\n3. The pipeline authenticates with Google Cloud.  \n\n4. SQL queries are executed against BigQuery.  \n\n5. Results are exported as CSV files to Google Cloud Storage.  \n\n6. Secure links to these files are provided for authorized consumption.\n\n\n## Prerequisites\n\n\nBefore we begin, ensure you have:\n\n\n* **Google Cloud APIs enabled:** BigQuery API and Cloud Storage API  \n\n* **Service account** with appropriate permissions:  \n  * BigQuery Job User  \n  * Storage Admin  \n  * **Note:** For this demo, we're using the service account approach for authentication, which is simpler to set up. 
For production environments, you might consider using GitLab's identity and access management integration with Google Cloud. This integration leverages Workload Identity Federation, which provides enhanced security and is more suitable for enterprise customers and organizations.  \n* **GitLab project** ready to store your SQL code and pipeline configuration\n\n\n## Step-by-step implementation\n\n\n**1. Configure Google Cloud credentials.**\n\n\nFirst, set up the necessary environment variables in your GitLab project:\n\n\n- Go to your **GitLab project > Settings > CI/CD**.  \n\n- Expand the **Variables** section.  \n\n- Add the following variables:  \n   * `GCS_BUCKET`: Your Google Cloud Storage bucket name  \n   * `GCP_PROJECT_ID`: Your Google Cloud project ID  \n   * `GCP_SA_KEY`: Base64-encoded service account key (mark as masked)\n\n**2. Create your SQL query.**\n\n\nCreate a file named `query.sql` in your GitLab repository with your BigQuery\nSQL query. The query looks like this:\n\n\n```\n\n-- This query shows a list of the daily top Google Search terms.\n\nSELECT\n   refresh_date AS Day,\n   term AS Top_Term,\n       -- These search terms are in the top 25 in the US each day.\n   rank,\nFROM `bigquery-public-data.google_trends.top_terms`\n\nWHERE\n   rank = 1\n       -- Choose only the top term each day.\n   AND refresh_date >= DATE_SUB(CURRENT_DATE(), INTERVAL 2 WEEK)\n       -- Filter to the last 2 weeks.\nGROUP BY Day, Top_Term, rank\n\nORDER BY Day DESC\n   -- Show the days in reverse chronological order.\n\n```\n\n\nThis query gets the top 25 search terms from Google Trends for the current\nday.\n\n\n**3. 
Configure the GitLab CI/CD pipeline.**\n\n\nCreate a `.gitlab-ci.yml` file in your repository root:\n\n\n```\n\nimage: google/cloud-sdk:alpine\n\n\ninclude:\n  - template: Jobs/Secret-Detection.gitlab-ci.yml  # https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Jobs/Secret-Detection.gitlab-ci.yml\n\nexecute:\n  stage: deploy\n  script: \n    # Set up Google Cloud authentication and install necessary components\n    - export GOOGLE_CLOUD_CREDENTIALS=$(echo $SERVICE_ACCOUNT_KEY | base64 -d)\n    - echo $GOOGLE_CLOUD_CREDENTIALS > service-account-key.json \n    - gcloud auth activate-service-account --key-file service-account-key.json \n    - gcloud components install gsutil\n    # Set the active Google Cloud project\n    - gcloud config set project $PROJECT_ID\n    # Run the BigQuery query and export the results to a CSV file\n    - bq query --format=csv --use_legacy_sql=false \u003C test.sql > results.csv\n    # Create a Google Cloud Storage bucket if it doesn't exist\n    - gsutil ls gs://${CLOUD_STORAGE_BUCKET} || gsutil mb gs://${CLOUD_STORAGE_BUCKET}\n    # Upload the CSV file to the storage bucket\n    - gsutil cp results.csv gs://${CLOUD_STORAGE_BUCKET}/results.csv\n    # Set the access control list (ACL) to make the CSV file publicly readable\n    - gsutil acl ch -u AllUsers:R gs://${CLOUD_STORAGE_BUCKET}/results.csv\n    # Define the static URL for the CSV file\n    - export STATIC_URL=\"https://storage.googleapis.com/${CLOUD_STORAGE_BUCKET}/results.csv\"\n    # Display the static URL for the CSV file\n    - echo \"File URL = $STATIC_URL\"\n\n```\n\n\n**4. 
Run the pipeline.**\n\n\nNow, whenever changes are merged to your main branch, the pipeline will\nprovide a link to the CSV file stored on the Google Cloud Storage bucket.\nThis file contains the result of the executed SQL query that GitLab subjects\nto security checks.\n\n\n## Benefits of this approach\n\n\n* **Security:** Authentication is handled automatically via service accounts\n(or Workload Identity Federation for enhanced security in production\nenvironments).  \n\n* **Auditability:** All data exports are tracked through GitLab commits and\npipeline logs.  \n\n* **Repeatability:** Consistent, predictable export process on every run,\nand can be scheduled.  \n\n* **Version control:** SQL queries are properly versioned and reviewed.  \n\n* **Automation:** Significantly fewer manual exports, reducing human error.\n\n\n## Try it today\n\n\nBy combining GitLab's DevSecOps capabilities with Google Cloud's BigQuery\nand Cloud Storage, you've now automated and secured your data publishing\nworkflow. 
This approach reduces manual operations, resolves permission\nheadaches, and addresses security concerns – all achieved with just a few\nlines of GitLab CI code.\n\n\n> Use this tutorial's [complete code\nexample](https://gitlab.com/gitlab-partners-public/google-cloud/demos/big-query-data-publishing)\nto get started now.\n",[951,9,719,888,233,1968],{"slug":3976,"featured":91,"template":698},"tutorial-secure-bigquery-data-publishing-with-gitlab","content:en-us:blog:tutorial-secure-bigquery-data-publishing-with-gitlab.yml","Tutorial Secure Bigquery Data Publishing With Gitlab","en-us/blog/tutorial-secure-bigquery-data-publishing-with-gitlab.yml","en-us/blog/tutorial-secure-bigquery-data-publishing-with-gitlab",{"_path":3982,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":3983,"content":3989,"config":3994,"_id":3996,"_type":14,"title":3997,"_source":16,"_file":3998,"_stem":3999,"_extension":19},"/en-us/blog/tutorial-security-scanning-in-air-gapped-environments",{"title":3984,"description":3985,"ogTitle":3984,"ogDescription":3985,"noIndex":6,"ogImage":3986,"ogUrl":3987,"ogSiteName":685,"ogType":686,"canonicalUrls":3987,"schema":3988},"Tutorial: Security scanning in air-gapped environments","Security scanning remains crucial even in air-gapped environments to detect internal threats, prevent data exfiltration, and maintain operational integrity. 
Learn how GitLab can help get air-gapped environments secure.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099301/Blog/Hero%20Images/Blog/Hero%20Images/AdobeStock_1097303277_6gTk7M1DNx0tFuovupVFB1_1750099300786.jpg","https://about.gitlab.com/blog/tutorial-security-scanning-in-air-gapped-environments","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Tutorial: Security scanning in air-gapped environments\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Fernando Diaz\"}],\n        \"datePublished\": \"2025-02-05\",\n      }",{"title":3984,"description":3985,"authors":3990,"heroImage":3986,"date":3991,"body":3992,"category":693,"tags":3993},[1599],"2025-02-05","Air-gapped environments are computer networks or systems that are physically\nisolated from unsecured networks, such as the public internet or unsecured\nlocal area networks. This isolation is implemented as a security measure to\nprotect sensitive data and critical systems from external cyber threats by\nproviding:\n\n\n* Enhanced security: By physically isolating systems from external networks,\nair-gapped environments help prevent remote attacks, malware infections, and\nunauthorized data access. 
This is crucial for highly sensitive data and\ncritical systems.\n\n* Data protection: Air-gapping provides the strongest protection against\ndata exfiltration since there's no direct connection that attackers could\nuse to steal information.\n\n* Critical infrastructure protection: For systems that control vital\ninfrastructure (like power plants, water treatment facilities, or military\nsystems), air-gapping helps prevent potentially catastrophic cyber attacks.\n\n* Compliance requirements: Many regulatory frameworks require air-gapping\nfor certain types of sensitive data or critical systems, particularly in\ngovernment, healthcare, and financial sectors.\n\n* Malware protection: Without network connectivity, systems are protected\nfrom network-based malware infections and ransomware attacks.\n\n\nEven though air-gapped systems are isolated, they can still have\nvulnerabilities. Regular security scanning helps identify these weaknesses\nbefore they can be exploited. In this article, you will learn the different\nsecurity scanners GitLab provides and how they can be added/updated in a\nlimited-connectivity environment.\n\n\n## GitLab security scanners in air-gapped environments\n\n\nGitLab provides a variety of different security scanners for the complete\napplication lifecycle. 
The scanners that support air-gapped environments\ninclude:\n\n\n* [Static Application Security Testing\n(SAST)](https://docs.gitlab.com/ee/user/application_security/sast/index.html#running-sast-in-an-offline-environment)  \n\n* [Dynamic Application Security Testing\n(DAST](https://docs.gitlab.com/ee/user/application_security/dast/browser/configuration/offline_configuration.html))  \n\n* [Secret\nDetection](https://docs.gitlab.com/ee/user/application_security/secret_detection/pipeline/index.html#offline-configuration)  \n\n* [Container\nScanning](https://docs.gitlab.com/ee/user/application_security/container_scanning/index.html#running-container-scanning-in-an-offline-environment)  \n\n* [Dependency\nScanning](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/index.html#offline-environment)  \n\n* [API\nFuzzing](https://docs.gitlab.com/ee/user/application_security/api_fuzzing/configuration/offline_configuration.html)  \n\n* [License\nScanning](https://docs.gitlab.com/ee/user/compliance/license_scanning_of_cyclonedx_files/index.html#running-in-an-offline-environment)\n\n\nBy default, GitLab Self-Managed instances pull security scanner images from\nthe public GitLab container registry (registry.gitlab.com) and store them\nwithin the [built-in local GitLab container\nregistry](https://docs.gitlab.com/ee/user/packages/container_registry/). 
I\nwill demonstrate this flow below by running the following pipeline that\nscans for secrets on a [sample\nproject](https://gitlab.com/gitlab-da/tutorials/security-and-governance/owasp/juice-shop): \n\n\n```yaml\n\ninclude:\n  - template: Jobs/Secret-Detection.gitlab-ci.yml\n```\n\n\nWhen running the job in an internet-connected GitLab instance the job\npasses:\n\n\n![GitLab Runner with internet access successfully pulling from external\nregistry\n\n](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099328/Blog/Content%20Images/Blog/Content%20Images/pass-1_aHR0cHM6_1750099328577.png)\n\n\n\u003Ccenter>\u003Ci>GitLab Runner with internet access successfully pulling from\nexternal registry\u003C/i>\u003C/center>\n\n\n\u003Cbr>\u003C/br>\n\nHowever, If I disable internet access to the VM running GitLab, the\n`secret-detection` job will fail to download the container image, causing\nthe job to fail:\n\n\n![GitLab Runner without internet access failing to pull from external\nregistry](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099328/Blog/Content%20Images/Blog/Content%20Images/fail-1_aHR0cHM6_1750099328577.png)\n\n\n\u003Ccenter>\u003Ci>GitLab Runner without internet access failing to pull from\nexternal registry\u003C/i>\u003C/center>\n\n\u003Cbr>\u003C/br>\n\n\nAlternatively, if I set my GitLab Runners’ pull image policy to\n`if-not-present` from `always`, I can load the cached version of the scanner\nif it was run before on the internet by using the image stored in our local\ndocker:\n\n\n![GitLab Runner without internet access successfully pulling from internal\nregistry\ncache](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099329/Blog/Content%20Images/Blog/Content%20Images/pass-2_aHR0cHM6_1750099328579.png)\n\n\n\u003Ccenter>\u003Ci>GitLab Runner without internet access successfully pulling from\ninternal registry cache\u003C/i>\u003C/center>\n\n\n\u003Cbr>\u003C/br>\n\n\n### Setting up offline scanning 
prerequisites\n\n\nRunning these security scanners in an air-gapped environment requires the\nfollowing:\n\n\n* [GitLab Ultimate\nsubscription](https://about.gitlab.com/pricing/ultimate/)  \n\n* [Offline cloud\nlicense](https://about.gitlab.com/pricing/licensing-faq/cloud-licensing/#offline-cloud-licensing)  \n\n* GitLab Self-Managed cluster\n\n\nYou can follow along with this tutorial in any GitLab Self-Managed EE\ninstance (even those that are not air-gapped) to learn how to transfer and\nrun images in an air-gapped environment. In this tutorial, I will\ndemonstrate how to load scanner images onto a GitLab-EE instance running in\na Google Compute VM where I cut off the `EGRESS` to everything by\nimplementing firewall rules:\n\n\n```bash\n\n# egress firewall rule to block all outbound traffic to the internet\n\n$ gcloud compute firewall-rules create deny-internet-egress \\\n    --direction=EGRESS \\\n    --priority=1000 \\\n    --network=default \\\n    --action=DENY \\\n    --rules=all \\\n    --destination-ranges=0.0.0.0/0 \\\n    --target-tags=no-internet\n\n# Create an allow rule for internal traffic with higher priority\n\n$ gcloud compute firewall-rules create allow-internal-egress \\\n    --direction=EGRESS \\\n    --priority=900 \\\n    --network=default \\\n    --action=ALLOW \\\n    --rules=all \\\n    --destination-ranges=10.0.0.0/8,192.168.0.0/16,172.16.0.0/12 \\\n    --target-tags=no-internet\n\n# Apply tag to VM\n\n$ gcloud compute instances add-tags YOUR_VM_NAME \\\n    --zone=YOUR_ZONE \\\n    --tags=no-internet\n```\n\n\nThen, once I SSH into my VM, you can see we cannot connect to\nregistry.gitlab.com:\n\n\n```bash\n\n# showing I can’t access the gitlab container registry\n\n$ ping registry.gitlab.com\n\nPING registry.gitlab.com (35.227.35.254) 56(84) bytes of data.\n\n^C\n\n--- registry.gitlab.com ping statistics ---\n\n3 packets transmitted, 0 received, 100% packet loss, time 2031ms\n\n```\n\n\n**Note:** I am still allowing ingress so I can 
copy files and SSH into the\nmachine.\n\n\n## Load security scanners in air-gapped environments\n\n\nTo use the various security scanners on air-gapped environments, the GitLab\nRunner must be able to fetch the scanner container images from GitLab’s\nbuilt-in container registry. This means that the container images for the\nsecurity scanners must be downloaded and packaged in a separate environment\nwith access to the public internet. The process of loading security scanners\nonto an air-gapped environment includes the following:\n\n\n1. Download and package container images from the public internet.\n\n2. Transfer images to offline environment.\n\n3. Load transferred images into offline container registry.\n\n\nNow let’s go over how we can implement GitLab Secret Detection in an\nair-gapped environment.\n\n\n### Download and package container images from public internet\n\n\nLet’s download the container image for secret detection and store it within\nour local container registry. Other scanner images can be found in the\n[offline deployments\ndocumentation](https://docs.gitlab.com/ee/user/application_security/offline_deployments/).\nI will be using Podman desktop to download these images, but you can use\nDocker desktop or other alternatives.\n\n\n1. 
Pull the GitLab Secret Detection image.\n\n\n```bash\n\n$ podman pull registry.gitlab.com/security-products/secrets:6\n\nTrying to pull registry.gitlab.com/security-products/secrets:6...\n\nGetting image source signatures\n\nCopying blob\nsha256:999745130ac045f2b1c29ecce088b43fc4a95bbb82b7960fb7b8abe0e3801bf8\n\nCopying blob\nsha256:a4f7c013bb259c146cd8455b7c3943df7ed84b157e42a2348eef16546d8179b1\n\nCopying blob\nsha256:1f3e46996e2966e4faa5846e56e76e3748b7315e2ded61476c24403d592134f0\n\nCopying blob\nsha256:400a41f248eb3c870bd2b07073632c49f1e164c8efad56ea3b24098a657ec625\n\nCopying blob\nsha256:9090f17a5a1bb80bcc6f393b0715210568dd0a7749286e3334a1a08fb32d34e6\n\nCopying blob\nsha256:c7569783959081164164780f6c1b0bbe1271ee8d291d3e07b2749ae741621ea3\n\nCopying blob\nsha256:20c7ca6108f808ad5905f6db4f7e3c02b21b69abdea8b45abfa34c0a2ba8bdb5\n\nCopying blob\nsha256:e8645a00be64d77c6ff301593ce34cd8c17ffb2b36252ca0f2588009a7918d2e\n\nCopying config\nsha256:0235ed43fc7fb2852c76e2d6196601968ae0375c72a517bef714cd712600f894\n\nWriting manifest to image destination\n\nWARNING: image platform (linux/amd64) does not match the expected platform\n(linux/arm64)\n\n0235ed43fc7fb2852c76e2d6196601968ae0375c72a517bef714cd712600f894\n\n\n$ podman images\n\nREPOSITORY                                                  TAG        \nIMAGE ID      CREATED      SIZE\n\nregistry.gitlab.com/security-products/secrets               6          \n0235ed43fc7f  4 hours ago  85.3 MB\n\n```\n\n\n2. 
Save the image as a tarball.\n\n\n```bash\n\n$ podman save -o secret-detection.tar\nregistry.gitlab.com/security-products/secrets:6\n\n$ chmod +r secret-detection.tar\n\n$ ls -al secret-detection.tar\n\n-rw-r--r--@ 1 fern  staff  85324800 Jan 10 10:25 secret-detection.tar\n\n```\n\n\nAlternatively, you can use the [official GitLab\ntemplate](https://docs.gitlab.com/ee/user/application_security/offline_deployments/#using-the-official-gitlab-template)\non an environment with internet access to download the container images\nneeded for the security scanners and save them as job artifacts or push them\nto the container registry of the project where the pipeline is executed. \n\n\n### Transfer images to offline environment\n\n\nNext, let's transfer the tarball to our air-gapped environment. This can be\ndone in several ways, depending on your needs, such as:\n\n\n* Physical media transfer  \n\n* Data diodes  \n\n* Guard systems  \n\n* Cross-domain solutions (CDS) \n\n\nI will SCP (Secure Copy Protocol) the tarball directly to my VM that does\nnot have egress access, but does allow ingress. 
As this is just for\ndemonstration purposes, make sure to consult your organization's security\npolicies and transfer procedures for air-gapped environments.\n\n\n#### Verify the image is not cached\n\n\nBefore transferring the file, I’ll delete the Docker images on my GitLab\ninstance pertaining to secret detection to make sure they aren't cached:\n\n\n```bash\n\n$ docker images\n\nREPOSITORY                                                         \nTAG              IMAGE ID       CREATED        SIZE\n\nregistry.gitlab.com/security-products/secrets                      \n6                0235ed43fc7f   9 hours ago    84.8MB\n\nregistry.gitlab.com/security-products/secrets                      \n\u003Cnone>           16d88433af61   17 hours ago   74.9MB\n\n\n$ docker image rmi 16d88433af61 -f\n\nUntagged:\nregistry.gitlab.com/security-products/secrets@sha256:f331da6631d791fcd58d3f23d868475a520f50b02d64000e2faf1def66c75d48\n\nDeleted:\nsha256:16d88433af618f0b405945031de39fe40b3e8ef1bddb91ca036de0f5b32399d7\n\nDeleted:\nsha256:1bb06f72f06810e95a70039e797481736e492201f51a03b02d27db055248ab6f\n\nDeleted:\nsha256:a5ef2325ce4be9b39993ce301f8ed7aad1c854d7ee66f26a56a96967c6606510\n\nDeleted:\nsha256:f7cdac818a36d6c023763b76a6589c0db7609ca883306af4f38b819e62f29471\n\nDeleted:\nsha256:5eabf4d47287dee9887b9692d55c8b5f848b50b3b7248f67913036014e74a0e9\n\nDeleted:\nsha256:51b7cb600604c0737356f17bc02c22bac3a63697f0bf95ba7bacb5b421fdb7da\n\nDeleted:\nsha256:1546193b011d192aa769a15d3fdd55eb4e187f201f5ff7506243abb02525dc06\n\nDeleted:\nsha256:1ea72408d0484c3059cc0008539e6f494dc829caa1a97d156795687d42d9cb57\n\nDeleted:\nsha256:1313ee9da7716d85f63cfdd1129f715e9bbb6c9c0306e4708ee73672b3e40f26\n\nDeleted:\nsha256:954ebfd83406f0dfed93eb5157ba841af5426aa95d4054174fff45095fd873a1\n\n\n$ docker image rmi 0235ed43fc7f -f\n\nUntagged: 
registry.gitlab.com/security-products/secrets:6\n\nDeleted:\nsha256:0235ed43fc7fb2852c76e2d6196601968ae0375c72a517bef714cd712600f894\n\nDeleted:\nsha256:f05f85850cf4fac79e279d93afb6645c026de0223d07b396fce86c2f76096c1f\n\nDeleted:\nsha256:7432b0766b885144990edd3166fbabed081be71d28d186f4d525e52729f06b1f\n\nDeleted:\nsha256:2c6e3361c2ee2f43bd75fb9c7c12d981ce06df2d51a134965fa47754760efff0\n\nDeleted:\nsha256:7ad7f7245b45fbe758ebd5788e0ba268a56829715527a9a4bc51708c21af1c7f\n\nDeleted:\nsha256:3b73a621115a59564979f41552181dce07f3baa17e27428f7fff2155042a1901\n\nDeleted:\nsha256:78648c2606a7c4c76885806ed976b13e4d008940bd3d7a18b52948a6be71b60d\n\nDeleted:\nsha256:383d4a6dc5be9914878700809b4a3925379c80ab792dfe9e79d14b0c1d6b5fad\n\n```\n\n\nThen I'll rerun the job to show the failure:\n\n\n![GitLab Runner without internet access fails to pull an image from internal\nregistry\ncache](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099328/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750099328580.png)\n\n\n\u003Ccenter>\u003Ci>GitLab Runner without internet access fails to pull an image from\ninternal registry cache\u003C/i>\u003C/center>\n\n\n#### SCP file to GitLab instance\n\n\nNow, from my local machine, I will SCP the file to my GitLab instance as\nfollows:\n\n\n```bash\n\n$ gcloud compute scp secret-detection.tar INSTANCE:~ --zone=ZONE\n\nsecret-detection.tar                                                         \n100%   81MB  21.5MB/s   00:03\n\n```\n\n\n### Load transferred images into offline container registry\n\n\nNext, I'll SSH into my VM and load the Docker image:\n\n\n```bash\n\n$ gcloud compute ssh INSTANCE --zone=ZONE\n\n\n$ sudo docker load -i secret-detection.tar\n\nc3c8e454c212: Loading layer\n[==================================================>]  2.521MB/2.521MB\n\n51e93afaeedc: Loading layer\n[==================================================>]  32.55MB/32.55MB\n\ne8a25e39bb30: Loading 
layer\n[==================================================>]  221.2kB/221.2kB\n\n390704968493: Loading layer\n[==================================================>]  225.8kB/225.8kB\n\n76cf57e75f63: Loading layer\n[==================================================>]  17.64MB/17.64MB\n\nc4c7a681fd10: Loading layer\n[==================================================>]  4.608kB/4.608kB\n\nf0690f406157: Loading layer\n[==================================================>]  24.01MB/24.01MB\n\nLoaded image: registry.gitlab.com/security-products/secrets:6\n\n```\n\n\n### Run the scanners\n\n\nI'll [re-run the pipeline\nmanually](https://docs.gitlab.com/ee/ci/pipelines/#run-a-pipeline-manually)\nand the scanner will be pulled from the cache. Once the pipeline completes,\nwe can see the secret detection job is successful:\n\n\n![GitLab Runner without internet access successfully pulling from internal\nregistry cache after image\nloaded](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099328/Blog/Content%20Images/Blog/Content%20Images/image7_aHR0cHM6_1750099328581.png)\n\n\n\u003Ccenter>\u003Ci>GitLab Runner without internet access successfully pulling from\ninternal registry cache after image loaded\u003C/i>\u003C/center>\n\n\nIf you want to pull the image from a different location or you tag your\nimages in a different way, you can edit the config as follows:\n\n\n```yaml\n\ninclude:\n  - template: Jobs/Secret-Detection.gitlab-ci.yml\n\nvariables:\n  SECURE_ANALYZERS_PREFIX: \"localhost:5000/analyzers\"\n```\n\n\nSee the [offline environments\ndocumentation](https://docs.gitlab.com/ee/user/application_security/offline_deployments/)\nfor more information.\n\n\n### View scanner results\n\n\nOnce the scanner completes on the default branch, a vulnerability report is\npopulated with all the findings. 
The vulnerability report provides\ninformation about vulnerabilities from scans of the default branch.\n\n\nYou can access the vulnerability report by navigating to the side tab and\nselecting **Secure > Vulnerability Report**:\n\n\n![GitLab Vulnerability Report with secret detection\nfindings](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099328/Blog/Content%20Images/Blog/Content%20Images/vulnerability_report_aHR0cHM6_1750099328581.png)\n\n\n\u003Ccenter>\u003Ci>GitLab Vulnerability Report with secret detection\nfindings\u003C/i>\u003C/center>\n\n\n\u003Cbr>\u003C/br>\n\n\nThe project’s vulnerability report provides:\n\n- totals of vulnerabilities per severity level\n\n- filters for common vulnerability attributes\n\n- details of each vulnerability, presented in tabular layout\n\n- a timestamp showing when it was updated, including a link to the latest\npipeline\n\n\nWe can see that two vulnerabilities were detected by the Secret Detection\nscanner. If we click on a vulnerability, we will be transported to its\nvulnerability page:\n\n\n![GitLab Vulnerability Page showing detailed\ninsights](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099329/Blog/Content%20Images/Blog/Content%20Images/insights_aHR0cHM6_1750099328582.png)\n\n\n\u003Ccenter>\u003Ci>GitLab Vulnerability Page showing detailed insights\u003C/i>\u003C/center>\n\n\n\u003Cbr>\u003C/br>\n\n\nThe vulnerability page provides details of the vulnerability, which can be\nused to triage and find a path to remediation. 
These vulnerability details\ninclude:\n\n- description\n\n- when it was detected\n\n- current status\n\n- available actions\n\n- linked issues\n\n- actions log\n\n- filename and line number of the vulnerability (if available)\n\n- severity\n\n\n## Read more\n\n\nTo learn more about GitLab and running security scanners in air-gapped\nenvironments, check out the following resources:\n\n\n* [GitLab Ultimate](https://about.gitlab.com/pricing/ultimate/)  \n\n* [GitLab Security and Compliance\nSolutions](https://about.gitlab.com/solutions/security-compliance/)  \n\n* [GitLab Offline Deployments\nDocumentation](https://docs.gitlab.com/ee/user/application_security/offline_deployments/)  \n\n* [GitLab Application Security\nDocumentation](https://docs.gitlab.com/ee/user/application_security/)\n",[719,693,186,9,695],{"slug":3995,"featured":91,"template":698},"tutorial-security-scanning-in-air-gapped-environments","content:en-us:blog:tutorial-security-scanning-in-air-gapped-environments.yml","Tutorial Security Scanning In Air Gapped Environments","en-us/blog/tutorial-security-scanning-in-air-gapped-environments.yml","en-us/blog/tutorial-security-scanning-in-air-gapped-environments",{"_path":4001,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4002,"content":4007,"config":4012,"_id":4014,"_type":14,"title":4015,"_source":16,"_file":4016,"_stem":4017,"_extension":19},"/en-us/blog/u-s-navy-black-pearl-lessons-in-championing-devsecops",{"title":4003,"description":4004,"ogTitle":4003,"ogDescription":4004,"noIndex":6,"ogImage":3092,"ogUrl":4005,"ogSiteName":685,"ogType":686,"canonicalUrls":4005,"schema":4006},"U.S. Navy Black Pearl: Lessons in championing DevSecOps","Sigma Defense built a managed service software factory environment for the military using GitLab as its DevSecOps platform. 
Here's what they learned.","https://about.gitlab.com/blog/u-s-navy-black-pearl-lessons-in-championing-devsecops","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"U.S. Navy Black Pearl: Lessons in championing DevSecOps\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Sandra Gittlen\"}],\n        \"datePublished\": \"2023-12-12\",\n      }",{"title":4003,"description":4004,"authors":4008,"heroImage":3092,"date":4009,"body":4010,"category":1073,"tags":4011},[1295],"2023-12-12","Manuel Gauto, director of engineering at government contractor [Sigma Defense](https://sigmadefense.com/), is a true DevSecOps champion. As co-creator of Black Pearl, a DevSecOps environment Sigma Defense manages for the U.S. Navy, Gauto witnesses firsthand the power that combining development, security, and operations can have in modernizing and scaling software development.\n\n\"If a DevSecOps environment is done correctly - where the tooling, security and compliance, connectivity, and onboarding are all handled as part of the platform – then mission owners can focus on mastering CI/CD in the context of their mission,\" Gauto said.\n\nGauto participated in GitLab's DevSecOps World Tour in Washington, D.C., speaking with GitLab Federal CTO Joel Krooswyk about Black Pearl and how consolidating a multitude of software factories into a single managed DevSecOps cloud environment has yielded tremendous results at scale, including:\n\n- a reduction in software factory setup time from around 6 months to 3 to 5 days\n- a 10x lower cost, decreasing from around $4 million to around $400,000\n- a more secure environment because there is inherent security with Authorization to Operate (ATO)\n- faster onboarding, decreasing from as long as 5 weeks to 1 day\n\n## The origins of Black Pearl\n\nA few years ago, the Navy had numerous software factories operating concurrently. 
Gauto himself was involved in standing a few of them up. \"We realized that it wasn't the most efficient approach – duplicative infrastructure in four or five different places that was ultimately doing the same thing,\" he said. \n\nThe team pitched the idea of a single environment that would consolidate cloud infrastructure, address security issues, and provide connectivity. That single environment was named \"Black Pearl\" and now consists of two offerings: Lighthouse, a DevSecOps infrastructure as code/configuration as code (IaC/CaC) baseline, and Party Barge, a managed shared offering.\n\nBlack Pearl’s common software environment with ATO provides commoditized DevSecOps tooling, pipeline component templates, governance/management, logging and metrics, integration infrastructure, cloud automation, and compute resources. The GitLab DevSecOps Platform is a major part of Black Pearl, providing \"a one-stop shop\" for source code management, tasks, documentation, and security scanning. Gauto said the dashboards and visualization are particularly integral to go/no-go decisions on shipping software.\n\n\"GitLab is the kind of platform that really enables us because it is the first time, even internally with our development, that we don't have to jump around to a bunch of different tools – we can just do everything in GitLab,\" he said. \"Having everyone on one platform also enables collaborative efficiency.\"\n\nGitLab's capabilities support the fast, secure, and cost-effective standup of software factories, according to Gauto.\n\n> Want to learn more about GitLab for the public sector? [Contact us today](https://about.gitlab.com/solutions/public-sector/).\n\n## How to build a strong DevSecOps environment\n\nIn the years since Black Pearl was first launched, Gauto has learned a lot about what makes a robust and secure DevSecOps environment. 
He said it comes down to tearing down silos and establishing a development ecosystem, centralizing security and compliance, providing the\nability to easily and quickly onboard talent, and remaining flexible and open to innovation. \n\n### Establish a strong development ecosystem\n\nIn large organizations, especially within government agencies, software development tends to break into silos. \"You'll have units of innovation that struggle to collaborate because they may work in one environment or in one building,\" Gauto said, adding that sharing anything – code, best practices, tooling, or infrastructure – can be challenging.\n\n\"By creating a well-established, well-maintained deployment of tooling, in particular, with GitLab, people can see what other teams are doing and share more readily,\" he said. \"Instead of mailing a CD to some lab somewhere else in the country, DevSecOps teams can just say, 'Let me add you as a developer on my project and you can kick around these repositories.'\" \n\nAn ecosystem helps aggregate demand in a way that breaks down barriers to infrastructure accreditation. \"We can go to the cyber community or certification community and say, 'I'm here representing a large group of users. This is a pain point we all have and we would like to work with you to figure it out,'\" Gauto said. For example, allowing people to connect to Black Pearl over the internet from a contractor machine, government machine, or wherever. 
\"It should not be this difficult in an unclassified environment.\"\n\nWith a strong ecosystem, you also can build up your best practices and processes around planning (such as Agile, Scrum, and Kanban), integrating on-site and remote development, gaining authorization for software, and delivering applications to various environments.\n\n### Apply security and compliance\n\nWhen it comes to security and compliance, Gauto said the biggest thing is to be able to see the train coming down the tracks and to be as prepared as possible. \"Let's not be surprised and let's not be standing on the tracks when it gets here,\" he said. \n\nOne area where that sentiment is wholly applicable is compliance, where mandates are evolving at breakneck speed. \"We want to be prepared to provide the data and the tooling in a format that's ingestible by the right people,\" he said.\n\nHe credits GitLab for helping with this challenge. \"GitLab Ultimate lets us just bake compliance in from the start and template a bunch of stuff from the start,\" which lets customers immediately start running with compliance, he said.\n\nGitLab also supports licensing and ATO scans in a single platform. \n\n### Support rapid onboarding of talent\n\nAcross the military, there are obstacles to accessing the best DevSecOps talent, including working in buildings with no windows, and having to jump through giant hoops to be able to work on classified networks.\n\n\"I think that really limits the talent that can be brought to the table to solve some of the really hard problems we have,\" Gauto said. For Black Pearl to be successful\nin supporting the missions, it was imperative to \"enable broader access to talent and then build sustainable onboarding workflows.\"\n\nWithin the DoD, there are a lot of difficult and interesting problems that need to be solved but the\nability to collaborate across government, industry, and academia can be a limiting factor. 
\"There are a large number of locations where software development is being done and without a common environment to work within, work can be repeated, lost, or otherwise underutilized,\" Gauto said.\n\nBlack Pearl provides an environment for different organizations to collaborate in a way that is accessible. Black Pearl has focused on ensuring that authorized users are able to access the environment from different devices, networks, and locations without onerous access procedures. This approach fosters the development of new ideas and increases the speed to new capabilities.\n\n### Enable flexibility and innovation\n\nThe military has so many different delivery environments – from submarines to aircraft carriers – that Black Pearl has to be incredibly flexible. \"We enable everyone to manage their own kingdom and focus their efforts on pieces that are specific to their problem space,\" Gauto said. \"We know there's not one pipeline to rule all. So we provide the toolkit and let everyone tailor the solution to what they need instead of saying, 'you have to do software development this way and you have to deliver it this way.'\"\n\nBlack Pearl encourages customers to have a sense of ownership over their environments, using the building blocks of GitLab Ultimate, including CI/CD pipelines, scanning, and testing. \"We want them to get to the point where they are ready to use all the tools that we offer,\" Gauto said. They also educate the customer so that the customer can drive their own requirements rather than Black Pearl having to pitch functionality to them.\n\nFor example, the Black Pearl team closely collaborates with the developer team for The Forge, a software factory for the Navy's Aegis integrated weapons system. \"One day The Forge team said, 'We feel like we should be scanning our source code for secrets before we check it in.' Exactly.\"\n\nHe also wants to be careful to not stifle innovation or overly restrict customers. 
\"Not everything is a containerized business application that goes to the cloud,\" he said. He instructs his team members to \"make sure we have a strategy for providing flexibility for people that are doing something weird, because the people that are doing something weird are usually doing something cool.\" \n\nArtificial intelligence and machine learning will be a test of this philosophy. \"There are going to be some novel tools and some novel data classifications that we are going to have to iterate on quickly,\" he said.\n\n## The proven thesis\n\nGauto is proud of Black Pearl's tremendous adoption rates, which have grown 400% over the past 12 months, and believes it is proof of the concept. \"The Black Pearl thesis of a managed service that enables people to quickly start solving their own problems without worrying about the 'boring' stuff can work and is valuable,\" he said.\n\n> Learn more about [GitLab for the public sector](https://about.gitlab.com/solutions/public-sector/).\n",[951,9,693,269,186],{"slug":4013,"featured":91,"template":698},"u-s-navy-black-pearl-lessons-in-championing-devsecops","content:en-us:blog:u-s-navy-black-pearl-lessons-in-championing-devsecops.yml","U S Navy Black Pearl Lessons In Championing Devsecops","en-us/blog/u-s-navy-black-pearl-lessons-in-championing-devsecops.yml","en-us/blog/u-s-navy-black-pearl-lessons-in-championing-devsecops",{"_path":4019,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4020,"content":4025,"config":4030,"_id":4032,"_type":14,"title":4033,"_source":16,"_file":4034,"_stem":4035,"_extension":19},"/en-us/blog/ultimate-guide-to-ci-cd-fundamentals-to-advanced-implementation",{"title":4021,"description":4022,"ogTitle":4021,"ogDescription":4022,"noIndex":6,"ogImage":3893,"ogUrl":4023,"ogSiteName":685,"ogType":686,"canonicalUrls":4023,"schema":4024},"Ultimate guide to CI/CD: Fundamentals to advanced implementation","Learn how to modernize continuous integration/continuous deployment, including automating 
the development, delivery, and security of pipelines.","https://about.gitlab.com/blog/ultimate-guide-to-ci-cd-fundamentals-to-advanced-implementation","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Ultimate guide to CI/CD: Fundamentals to advanced implementation\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Sandra Gittlen\"}],\n        \"datePublished\": \"2025-01-06\",\n      }",{"title":4021,"description":4022,"authors":4026,"heroImage":3893,"date":4027,"body":4028,"category":1277,"tags":4029},[1295],"2025-01-06","Continuous integration/continuous delivery ([CI/CD](https://about.gitlab.com/topics/ci-cd/)) has revolutionized how software teams create value for their users. Gone are the days of manual deployments and integration headaches — modern development demands automation, reliability, and speed.\n\nAt its core, CI/CD is about creating a seamless pipeline that takes code from a developer's environment all the way to production and incorporates feedback in real time. [CI](https://about.gitlab.com/topics/ci-cd/benefits-continuous-integration/) helps teams catch issues early — before they become costly problems — by ensuring that code changes are frequently merged into a shared repository, automatically tested, and validated. [CD](https://about.gitlab.com/topics/ci-cd/#what-is-continuous-delivery-cd) extends this by automating deployments, making releases predictable and stress-free.\n\nRather than relying on manual processes and complex toolchains for software development, teams can use a robust CI/CD pipeline to build, test, and deploy software. And AI can streamline the process even further, automatically engineering CI/CD pipelines for consistent quality, compliance, and security checks.\n\nThis guide explains modern CI/CD pipelines, from basic principles to best practices to advanced strategies. 
You'll also discover how leading organizations use CI/CD for impactful results. What you learn in this guide will help you scale your DevSecOps environment to develop and deliver software in an [agile](https://about.gitlab.com/topics/ci-cd/continuous-integration-agile/), automated, and efficient manner.\n\nWhat you'll learn:\n- [What is continuous integration?](#what-is-continuous-integration%3F)\n- [What is continuous delivery?](#what-is-continuous-delivery%3F)\n- [How source code management relates to CI/CD](#how-source-code-management-relates-to-cicd)\n- [The benefits of CI/CD in modern software development](#the-benefits-of-cicd-in-modern-software-development)\n  - [Key differences between CI/CD and traditional development](#key-differences-between-cicd-and-traditional-development)\n- [Understanding CI/CD fundamentals](#understanding-cicd-fundamentals)\n  - [What is a CI/CD pipeline?](#what-is-a-cicd-pipeline%3F)\n- [Best practices for CI/CD implementation and management](#best-practices-for-cicd-implementation-and-management)\n  - [CI best practices](#ci-best-practices)\n  - [CD best practices](#cd-best-practices)\n- [How to get started with CI/CD](#how-to-get-started-with-cicd)\n- [Security, compliance, and CI/CD](#security-compliance%2C-and-cicd)\n- [CI/CD and the cloud](#cicd-and-the-cloud)\n- [Advanced CI/CD](#advanced-cicd)\n  - [Reuse and automation in CI/CD](#reuse-and-automation-in-cicd)\n  - [Troubleshooting pipelines with AI](#troubleshooting-pipelines-with-ai)\n- [How to migrate to GitLab CI/CD](#how-to-migrate-to-gitlab-cicd)\n- [Lessons from leading organizations](#lessons-from-leading-organizations)\n- [CI/CD tutorials](#cicd-tutorials)\n\n## What is continuous integration?\n\n[Continuous integration](https://about.gitlab.com/topics/ci-cd/benefits-continuous-integration/) (CI) is the practice of integrating all your code changes into the main branch of a shared source code repository early and often, automatically testing changes when you commit 
or merge them, and automatically kicking off a build. With continuous integration, teams can identify and fix errors and security issues more easily and much earlier in the development process.\n\n## What is continuous delivery?\n[Continuous delivery](https://about.gitlab.com/topics/ci-cd/#what-is-continuous-delivery-cd) (CD) – sometimes called _continuous deployment_ – enables organizations to deploy their applications automatically, allowing more time for developers to focus on monitoring deployment status and assure success. With continuous delivery, DevSecOps teams set the criteria for code releases ahead of time and when those criteria are met and validated, the code is deployed into the production environment. This allows organizations to be more nimble and get new features into the hands of users faster. \n\n## How source code management relates to CI/CD\n\nSource code management ([SCM](https://about.gitlab.com/solutions/source-code-management/)) and CI/CD form the foundation of modern software development practices. SCM systems like [Git](https://about.gitlab.com/blog/what-is-git-the-ultimate-guide-to-gits-role-and-functionality/) provide a centralized way to track changes, manage different versions of code, and facilitate collaboration among team members. When developers work on new features or bug fixes, they create branches from the main codebase, make their changes, and then [merge them through merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/). This branching strategy allows multiple developers to work simultaneously without interfering with each other's code, while maintaining a stable main branch that always contains production-ready code.\n\nCI/CD takes the code managed by SCM systems and automatically builds, tests, and validates it whenever changes are pushed. 
When a developer submits their code changes, the CI/CD system automatically retrieves the latest code, combines it with the existing codebase, and runs through a series of automated checks. These typically include compiling the code, running unit tests, performing static code analysis, and checking code coverage. If any of these steps fail, the team is immediately notified, allowing them to address issues before they impact other developers or make their way to production. This tight integration between source control and continuous integration creates a feedback loop that helps maintain code quality and prevents integration problems from accumulating.\n\n## The benefits of CI/CD in modern software development\n\n[CI/CD brings transformative benefits to modern software development](https://about.gitlab.com/blog/ten-reasons-why-your-business-needs-ci-cd/) by dramatically reducing the time and risk associated with delivering new features and fixes. The continuous feedback loop gives DevSecOps teams confidence their changes are automatically validated against the entire codebase. The result is higher quality software, faster delivery times, and more frequent releases that can quickly respond to user needs and market demands.\n\nPerhaps most importantly, CI/CD fosters a culture of collaboration and transparency within software development teams. When everyone can see the status of builds, tests, and deployments in real time, it becomes easier to identify and resolve bottlenecks in the delivery process. The automation provided by CI/CD also reduces the cognitive load on developers, freeing them to focus on writing code rather than managing manual deployment processes. This leads to improved developer satisfaction and productivity, while also reducing the risk traditionally associated with the entire software release process. 
Teams can experiment more freely knowing rapid code reviews are part of the process and they can quickly roll back changes if needed, which encourages innovation and continuous improvement.\n\n> Get started with GitLab CI/CD. [Sign up for GitLab Ultimate](https://about.gitlab.com/free-trial/devsecops/) and try the AI-powered DevSecOps platform free.\n\n### Key differences between CI/CD and traditional development\n\nCI/CD differs from traditional software development in many ways, including:\n\n**Frequent code commits**\n\nDevelopers often work independently and infrequently upload their code to a main codebase, causing merge conflicts and other time-consuming issues. With CI/CD, developers push commits throughout the day, ensuring that conflicts are caught early and the codebase remains up to date.\n\n**Reduced risk**\n\nLengthy testing cycles and extensive pre-release planning are hallmarks of traditional software development. This is done to minimize risk but often hinders the ability to find and fix problems. Risk is managed in CI/CD by applying small, incremental changes that are closely monitored and easily reverted.\n\n**Automated and continuous testing**\n\nIn traditional software development, testing is done once development is complete. However, this causes problems, including delayed delivery and costly bug fixes. CI/CD supports automated testing that occurs continuously throughout development, sparked by each code commit. Developers also receive feedback they can take fast action on.\n\n**Automated, repeatable, and frequent deployments**\n\nWith CI/CD, deployments are automated processes that reduce the typical stress and effort associated with big software rollouts. 
The same deployment process can be repeated across environments, which saves time and reduces errors and inconsistencies.\n\n## Understanding CI/CD fundamentals\n\nCI/CD serves as a framework for building scalable, maintainable delivery processes, so it's critical for DevSecOps teams to firmly grasp its core concepts. A solid understanding of CI/CD principles enables teams to adapt strategies and practices as technology evolves, rather than being tied to legacy approaches. Here are some of the basics.\n\n### What is a CI/CD pipeline?\n\nA [CI/CD pipeline](https://about.gitlab.com/topics/ci-cd/cicd-pipeline/) is a series of steps, such as build, test, and deploy, that automate and streamline the software delivery process. [Each stage serves as a quality gate](https://about.gitlab.com/blog/guide-to-ci-cd-pipelines/), ensuring that only validated code moves forward. Early stages typically handle basic checks like compilation and unit testing, while later stages may include integration testing, performance testing, compliance testing, and staged deployments to various environments.\n\nThe pipeline can be configured to require manual approvals at critical points, such as before deploying to production, while automating routine tasks and providing quick feedback to developers about the health of their changes. This structured approach ensures consistency, reduces human error, and provides a clear audit trail of how code changes move from development to production. 
Modern pipelines are often implemented as code, allowing them to be version controlled, tested, and maintained just like application code.\n\nThese are other terms associated with CI/CD that are important to know:\n- **Commit:** a code change\n- **Job:** instructions a runner has to execute\n- **Runner:** an agent or server that executes each job individually that can spin up or down as needed\n- **Stages:** a keyword that defines certain job stages, such as \"build\" and \"deploy.\" Jobs of the same stage are executed in parallel. Pipelines are configured using a version-controlled YAML file, `.gitlab-ci.yml`, at the root level of a project.\n\n![CI/CD pipeline diagram](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749673928/Blog/Content%20Images/1690824533476.png)\n\n## Best practices for CI/CD implementation and management\n\nHow successful you are with CI/CD depends greatly on the [best practices](https://about.gitlab.com/blog/how-to-keep-up-with-ci-cd-best-practices/) you implement. \n\n#### CI best practices\n\n* Commit early, commit often.\n* Optimize pipeline stages.\n* Make builds fast and simple.\n* Use failures to improve processes.\n* Make sure the test environment mirrors production.\n\n#### CD best practices\n\n* Start where you are – you can always iterate.\n* Understand the best continuous delivery is done with minimal tools.\n* Track what’s happening so issues and merge requests don't get out of hand.\n* Streamline user acceptance testing and staging with automation.\n* Manage the release pipeline through automation.\n* Implement monitoring for visibility and efficiency. 
\n\n> ### Bookmark this!\n>\n>Watch our [\"Intro to CI/CD\" webinar](https://www.youtube.com/watch?v=sQ7Nw3o0izc)!\n>\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n\u003Ciframe src=\"https://www.youtube.com/embed/sQ7Nw3o0izc?si=3HpNqIClrc2ncr7Y\" title=\"Intro to CI/CD webinar\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## How to get started with CI/CD\n\nGetting started with CI/CD begins with identifying a simple but representative project to serve as your pilot. Choose a straightforward application with basic testing requirements, as this allows you to focus on learning the pipeline mechanics rather than dealing with complex deployment scenarios. Begin by ensuring your code is in [version control](https://about.gitlab.com/topics/version-control/) and has some [basic automated tests](https://about.gitlab.com/blog/develop-c-unit-testing-with-catch2-junit-and-gitlab-ci/) — even a few unit tests will suffice. The goal is to [create a minimal pipeline](https://about.gitlab.com/blog/how-to-learn-ci-cd-fast/) that you can gradually enhance as your understanding grows.\n\nFor GitLab specifically, the process starts with creating a `.gitlab-ci.yml` file in your project's root directory. This YAML file defines your pipeline stages (basic ones like build, test, and deploy) and jobs. A simple pipeline might look like this: The build stage compiles your code and creates artifacts, the test stage runs your unit tests, and the deploy stage pushes your application to a staging environment. GitLab will automatically detect this file and start running your pipeline whenever changes are pushed to your repository. The platform provides [built-in runners](https://docs.gitlab.com/runner/) to execute your pipeline jobs, though you can also set up your own runners for more control.\n\nAs you become comfortable with the basics, gradually add more sophisticated elements to your pipeline. 
This might include adding code quality checks, [security scanning](https://docs.gitlab.com/ee/user/application_security/#security-scanning), or automated deployment to production. GitLab's DevSecOps platform includes features like [compliance management](https://about.gitlab.com/blog/meet-regulatory-standards-with-gitlab/), [deployment variables](https://about.gitlab.com/blog/demystifying-ci-cd-variables/), and manual approval gates that you can incorporate as your pipeline matures. Pay attention to pipeline execution time and look for opportunities to run jobs in parallel where possible. Remember to add proper error handling and notifications so team members are promptly alerted of any pipeline failures. Start documenting common issues and solutions as you encounter them — this will become invaluable as your team grows.\n\n> ### Want to learn more about getting started with CI/CD? Register for a [free CI/CD course on GitLab University](https://university.gitlab.com/courses/continuous-integration-and-delivery-ci-cd-with-gitlab).\n\n## Security, compliance, and CI/CD\n\nOne of the greatest advantages of CI/CD is the ability to embed security and compliance checks early and often in the software development lifecycle. In GitLab, teams can use the `.gitlab-ci.yml` configuration to automatically trigger security scans at multiple stages, from initial code commit to production deployment. The platform's container scanning, dependency scanning, and security scanning capabilities ([Dynamic Application Security Testing](https://docs.gitlab.com/ee/user/application_security/dast/) and [Advanced SAST](https://about.gitlab.com/blog/gitlab-advanced-sast-is-now-generally-available/)) can be configured to run automatically with each code change, checking for vulnerabilities, compliance violations, and security misconfigurations. 
The platform's API enables integration with [external security tools](https://about.gitlab.com/blog/integrate-external-security-scanners-into-your-devsecops-workflow/), while the test coverage features ensure security tests meet required thresholds.\n\nGitLab's security test reports provide detailed information about findings, enabling quick remediation of security issues before they reach production. The Security Dashboard provides a centralized view of vulnerabilities across projects, while [security policies can be enforced](https://about.gitlab.com/blog/how-gitlab-supports-the-nsa-and-cisa-cicd-security-guidance/) through merge request approvals and pipeline gates. In addition, GitLab provides multiple layers of secrets management to protect sensitive information throughout the CI/CD process, audit logs to track access to secrets, and role-based access control (RBAC) to ensure only authorized users can view or modify sensitive configuration data.\n\nGitLab also supports software bill of materials ([SBOM](https://about.gitlab.com/blog/the-ultimate-guide-to-sboms/)) generation, providing a comprehensive inventory of all software components, dependencies, and licenses in an application and enabling teams to quickly identify and respond to vulnerabilities and comply with regulatory mandates.\n\n## CI/CD and the cloud\n\nGitLab's CI/CD platform provides robust integration with major cloud providers including [Amazon Web Services](https://about.gitlab.com/partners/technology-partners/aws/), [Google Cloud Platform](https://about.gitlab.com/blog/provision-group-runners-with-google-cloud-platform-and-gitlab-ci/), and [Microsoft Azure](https://docs.gitlab.com/ee/install/azure/), enabling teams to automate their cloud deployments directly from their pipelines. Through GitLab's cloud integrations, teams can manage cloud resources, deploy applications, and monitor cloud services all within the GitLab interface. 
The platform's built-in cloud deployment templates and [Auto DevOps](https://docs.gitlab.com/ee/topics/autodevops/) features significantly reduce the complexity of cloud deployments, allowing teams to focus on application development rather than infrastructure management. For organizations that want to automate their IT   infrastructure using GitOps, GitLab has a [Flux CD integration](https://about.gitlab.com/blog/why-did-we-choose-to-integrate-fluxcd-with-gitlab/).\n\nGitLab's cloud capabilities extend beyond basic deployment automation. The platform's [Kubernetes integration](https://about.gitlab.com/blog/kubernetes-overview-operate-cluster-data-on-the-frontend/) enables teams to manage container orchestration across multiple cloud providers, while the [cloud native GitLab installation options](https://about.gitlab.com/topics/ci-cd/cloud-native-continuous-integration/) allow the platform itself to run in cloud environments. Through GitLab's cloud-native features, teams can implement auto-scaling runners that dynamically provision cloud resources for pipeline execution, optimizing costs and performance. The platform's integration with cloud provider security services ensures that security and compliance requirements are met throughout the deployment process.\n\nFor multi-cloud environments, GitLab provides consistent workflows and tooling regardless of the underlying cloud provider. Teams can use GitLab's environment management features to handle different cloud configurations across development, staging, and production environments. The platform's [infrastructure as code](https://docs.gitlab.com/ee/user/infrastructure/iac/) support, particularly its native integration with Terraform, enables teams to version control and automate their cloud infrastructure provisioning. 
GitLab's monitoring and observability features integrate with cloud provider metrics, providing comprehensive visibility into application and infrastructure health across cloud environments.\n\n## Advanced CI/CD \nCI/CD has evolved far beyond simple build and deploy pipelines. In advanced implementations, CI/CD involves sophisticated orchestration of automated testing, security scanning, infrastructure provisioning, AI, and more. Here are a few advanced CI/CD strategies that can help engineering teams scale their pipelines and troubleshoot issues even as architectural complexity grows.\n\n### Reuse and automation in CI/CD\n\nGitLab is transforming how development teams create and manage CI/CD pipelines with two major innovations: the [CI/CD Catalog](https://about.gitlab.com/blog/ci-cd-catalog-goes-ga-no-more-building-pipelines-from-scratch/) and [CI/CD steps](https://about.gitlab.com/blog/introducing-ci-cd-steps-a-programming-language-for-devsecops-automation/), a new programming language for DevSecOps automation currently in experimental phase. The CI/CD Catalog is a centralized platform where developers can discover, reuse, and contribute CI/CD components. Components function as reusable, single-purpose building blocks that simplify pipeline configuration — similar to Lego pieces for CI/CD workflows. Meanwhile, CI/CD steps support complex workflows by allowing developers to compose inputs and outputs for a CI/CD job. With the CI/CD Catalog and CI/CD steps, DevSecOps teams can easily standardize CI/CD and its components, simplifying the process of developing and maintaining CI/CD pipelines.\n\n> Learn more in our [CI/CD Catalog FAQ](https://about.gitlab.com/blog/faq-gitlab-ci-cd-catalog/) and [CI/CD steps documentation](https://docs.gitlab.com/ee/ci/steps/).\n\n### Troubleshooting pipelines with AI\n\nWhile CI/CD pipelines can and do break, troubleshooting the issue quickly can minimize the impact. 
GitLab Duo Root Cause Analysis, part of a suite of AI-powered features, removes the guesswork by [determining the root cause for a failed CI/CD pipeline](https://about.gitlab.com/blog/quickly-resolve-broken-ci-cd-pipelines-with-ai/). When a pipeline fails, GitLab provides detailed job logs, error messages, and execution traces that show exactly where and why the failure occurred. Root Cause Analysis then uses AI to suggest a fix.\nWatch GitLab Duo Root Cause Analysis in action:\n\n\u003C!-- blank line -->\n\u003Cfigure class=\"video_container\">\n\u003Ciframe src=\"https://www.youtube.com/embed/sTpSLwX5DIs?si=J6-0Bf6PtYjrHX1K\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\u003C!-- blank line -->\n\n## How to migrate to GitLab CI/CD\n\nMigrating to the DevSecOps platform and its built-in CI/CD involves a systematic approach of analyzing your existing pipeline configurations, dependencies, and deployment processes to map them to GitLab's equivalent features and syntax. Use these guides to help make the move.\n\n* [How to migrate from Bamboo to GitLab CI/CD](https://about.gitlab.com/blog/migrating-from-bamboo-to-gitlab-cicd/)\n* [Jenkins to GitLab: The ultimate guide to modernizing your CI/CD environment](https://about.gitlab.com/blog/jenkins-gitlab-ultimate-guide-to-modernizing-cicd-environment/)\n* [GitHub to GitLab migration the easy way](https://about.gitlab.com/blog/github-to-gitlab-migration-made-easy/)\n\n## Lessons from leading organizations\n\nThese leading organizations migrated to GitLab and are enjoying the myriad benefits of CI/CD. 
Read their stories.\n\n- [Lockheed Martin](https://about.gitlab.com/customers/lockheed-martin/)\n- [Indeed](https://about.gitlab.com/blog/how-indeed-transformed-its-ci-platform-with-gitlab/)\n- [CARFAX](https://about.gitlab.com/customers/carfax/)\n- [HackerOne](https://about.gitlab.com/customers/hackerone/)\n- [Betstudios](https://about.gitlab.com/blog/betstudios-cto-on-improving-ci-cd-capabilities-with-gitlab-premium/)\n- [Thales and Carrefour](https://about.gitlab.com/blog/how-carrefour-and-thales-are-evolving-their-ci-cd-platforms/)\n\n## CI/CD tutorials\n\nBecome a CI/CD expert with these easy-to-follow tutorials.\n\n* [Basics of CI: How to run jobs sequentially, in parallel, or out of order](https://about.gitlab.com/blog/basics-of-gitlab-ci-updated/)\n* [How to set up your first GitLab CI/CD component](https://about.gitlab.com/blog/tutorial-how-to-set-up-your-first-gitlab-ci-cd-component/)\n* [Building a GitLab CI/CD pipeline for a monorepo the easy way](https://about.gitlab.com/blog/building-a-gitlab-ci-cd-pipeline-for-a-monorepo-the-easy-way/)\n* [Using child pipelines to continuously deploy to five environments](https://about.gitlab.com/blog/using-child-pipelines-to-continuously-deploy-to-five-environments/)\n* [CI/CD automation: Maximize 'deploy freeze' impact across GitLab groups](https://about.gitlab.com/blog/ci-cd-automation-maximize-deploy-freeze-impact-across-gitlab-groups/)\n* [Refactoring a CI/CD template to a CI/CD component](https://about.gitlab.com/blog/refactoring-a-ci-cd-template-to-a-ci-cd-component/)\n* [Annotate container images with build provenance using Cosign in GitLab CI/CD](https://about.gitlab.com/blog/annotate-container-images-with-build-provenance-using-cosign-in-gitlab-ci-cd)\n\n> #### Get started with GitLab CI/CD. 
[Sign up for GitLab Ultimate](https://about.gitlab.com/free-trial/devsecops/) and try the AI-powered DevSecOps platform.",[109,951,9,719,693,717],{"slug":4031,"featured":91,"template":698},"ultimate-guide-to-ci-cd-fundamentals-to-advanced-implementation","content:en-us:blog:ultimate-guide-to-ci-cd-fundamentals-to-advanced-implementation.yml","Ultimate Guide To Ci Cd Fundamentals To Advanced Implementation","en-us/blog/ultimate-guide-to-ci-cd-fundamentals-to-advanced-implementation.yml","en-us/blog/ultimate-guide-to-ci-cd-fundamentals-to-advanced-implementation",{"_path":4037,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4038,"content":4044,"config":4051,"_id":4053,"_type":14,"title":4054,"_source":16,"_file":4055,"_stem":4056,"_extension":19},"/en-us/blog/ultimate-guide-to-migrating-from-aws-codecommit-to-gitlab",{"title":4039,"description":4040,"ogTitle":4039,"ogDescription":4040,"noIndex":6,"ogImage":4041,"ogUrl":4042,"ogSiteName":685,"ogType":686,"canonicalUrls":4042,"schema":4043},"Ultimate guide to migrating from AWS CodeCommit to GitLab","Learn how to migrate from AWS Services to GitLab and seamlessly integrate with the DevSecOps platform in this comprehensive tutorial.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097810/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%2828%29_4mi0l4wzUa5VI4wtf8gInx_1750097810027.png","https://about.gitlab.com/blog/ultimate-guide-to-migrating-from-aws-codecommit-to-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Ultimate guide to migrating from AWS CodeCommit to GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Tsukasa Komatsubara\"},{\"@type\":\"Person\",\"name\":\"Darwin Sanoy\"},{\"@type\":\"Person\",\"name\":\"Samer Akkoub\"},{\"@type\":\"Person\",\"name\":\"Bart Zhang\"}],\n        \"datePublished\": \"2024-08-26\",\n      
}",{"title":4039,"description":4040,"authors":4045,"heroImage":4041,"date":4048,"body":4049,"category":717,"tags":4050},[3936,714,4046,4047],"Samer Akkoub","Bart Zhang","2024-08-26","On July 25, 2024, AWS made a significant announcement regarding its CodeCommit service. As detailed in their [official blog post](https://aws.amazon.com/blogs/devops/how-to-migrate-your-aws-codecommit-repository-to-another-git-provider/), AWS has decided to close new customer access to CodeCommit. While existing customers can continue using the service, AWS will not introduce new features, focusing only on security, availability, and performance improvements.\n\nThis announcement has prompted development teams to consider migrating their repositories to alternative Git providers. In light of these changes, we've prepared this comprehensive guide to assist teams in migrating to GitLab and integrating with other AWS services.\n\n**Note:** For more details on AWS's official migration recommendations, please refer to [their blog post](https://aws.amazon.com/blogs/devops/how-to-migrate-your-aws-codecommit-repository-to-another-git-provider/).\n\n## About this guide\n\nThis guide provides comprehensive information for development teams using GitLab who are considering integration with AWS services or planning to migrate from AWS-hosted Git repositories to GitLab.com. 
The guide is structured into three main sections:\n\n- [Parallel migration to GitLab](#section-1-parallel-migration-to-gitlab): Explains how to gradually migrate from existing AWS-hosted repositories to GitLab.com while minimizing risks.\n\n- [Integration with AWS CodeBuild](#section-2-integrating-gitlab-with-aws-codebuild): Provides steps to integrate GitLab repositories with AWS CodeBuild, setting up a powerful continuous integration (CI) environment.\n\n- [Integration with AWS CodePipeline](#section-3-integrating-gitlab-with-aws-codepipeline): Details how to connect GitLab repositories with AWS CodePipeline to build efficient continuous delivery (CD) pipelines.\n\n- [Downstream integrations for CodePipeline and CodeStar Connections](#section-4-migrating-to-gitlab): Explains how to leverage GitLab-AWS connections for widespread service access, unlocking a cascade of integration possibilities across the AWS ecosystem.\n\nThrough this guide, you'll learn how to combine the powerful features of GitLab and AWS to create an efficient and flexible development workflow.\n\n## Section 1: Parallel migration to GitLab \n\nFor those considering migrating Git repositories hosted on AWS to GitLab.com, this section, which is a phased approach, introduces methods to achieve migration while minimizing risks. By leveraging GitLab's mirroring capabilities, you can maintain existing development flows while testing the new environment.\n\n### Why is parallel migration important?\n\nLarge-scale system migrations always involve risks, particularly potential impacts on ongoing development work, existing integrations, and automated processes. Adopting a parallel migration approach offers the following benefits:\n\n1. Risk minimization: Test the new environment while keeping existing systems operational.\n2. Seamless transition: Development teams can gradually acclimate to the new system.\n3. Integration testing: Thoroughly test all integrations and automation in the new environment.\n4. 
Future-proofing: Enable teams to gradually migrate to GitLab CI/CD in parallel to existing CI.\n\nParallel migration is not required if it is already known that you want to cut over directly to GitLab.\n\n### Steps for migrating to GitLab.com\n\n#### Step 1: Get set up on GitLab.com\n\n- Check if your company already has a group in use on GitLab.com and whether they have single sign-on (SSO) set up – if they do, then you will want to use both.\n\n- If your company does not have a presence on GitLab.com, visit [GitLab.com](www.gitlab.com) and create a new account or log in to an existing one.\n- Create a new company namespace (a group at the root level of gitlab.com).\n- Pick a name that reflects your entire company (and is not already taken).\n\n#### Step 2: Import repository\nFor parallel migration: Use GitLab's pull mirroring feature to automatically sync changes from AWS-hosted repositories to GitLab.com.\n\n1. Navigate to the target group GitLab.com.\n2. In the upper right, click \"New project.\"\n3. On the \"Create new project\" page, click \"Import project.\"\n4. On the \"Import project\" page, click \"Repository by URL.\"\n5. Enter the URL of your AWS-hosted repository in the \"Git repository URL\" field.\n6. Underneath the Git repository URL field, check \"Mirror repository.\"\n7. Set up authentication: in the AWS CodeCommit console, select the clone URL for the repository you will migrate. If you plan on importing CodeCommit repositories into GitLab, you can use the HTTPS CodeCommit URL to clone the repository via GitLab Repository Mirroring. You will need to also provide your Git credentials from AWS for your identity and access management (IAM) user within GitLab. 
You can create Git credentials for AWS CodeCommit by following this [AWS guide](https://docs.aws.amazon.com/codecommit/latest/userguide/setting-up-gc.html).\n\n![Clone URL](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097822/Blog/Content%20Images/Blog/Content%20Images/clone-url-screenshot__1__aHR0cHM6_1750097822121.png)\n\nThis setup will automatically pull changes from the AWS-hosted repository to GitLab.com every five minutes by default.\n\nFor more information, read our [repository mirroring documentation](https://docs.gitlab.com/ee/user/project/repository/mirror/).\n\n#### Step 3: Test and validate integrations\n\n1. CI/CD pipelines: Set up the `.gitlab-ci.yml` file in GitLab CI to replicate existing pipelines. You can read more about [planning a migration from other CI tools into GitLab CI/CD](https://docs.gitlab.com/ee/ci/migration/plan_a_migration.html).\n2. Issue tracking: Import project issues and test workflows.\n3. Code review: Set up the merge request process and test review workflows.\n\n#### Step 4: Gradual migration\n\n1. Start with small or non-critical projects to familiarize yourself with working on GitLab.com.\n2. Provide training for team members and allow time to adapt to new workflows.\n3. Gradually migrate more projects while ensuring integrations and workflows are problem-free.\n\nFor more information, see [Automating Migrations from CodeCommit to GitLab](https://gitlab.com/guided-explorations/aws/migrating-from-codecommit-to-gitlab/-/blob/main/migrating_codecommit_to_gitlab.md).\n\n#### Step 5: Complete migration\nOnce all tests and validations are complete and the team is comfortable with the new environment, plan for full migration. For each project:\n\n1. Set a migration date and notify all stakeholders.\n2. Perform final data synchronization.\n3. Remove mirroring settings from the GitLab project.\n4. 
Set AWS-hosted repositories to read-only and transition all development work to GitLab.com.\n\n#### Step 6: Assess adoption of new capabilities\n\nGitLab collaboration and workflow automation for developers is far richer than CodeCommit. It merits some time to learn what these capabilities are. The merge request process is especially rich compared to CodeCommit.\n\nAfter repositories are stable on GitLab, it is very easy to experiment with GitLab CI/CD in parallel to an existing solution. Teams can take time to perfect their GitLab CI/CD automation while production workflows remain unaffected.\n\nGitLab artifact management is also very capable with the Releases feature and many package registries.\n\n### Section 1: Summary\nBy adopting a parallel migration approach to GitLab, you can achieve a smooth transition while minimizing risks. This process allows teams to gradually adapt to the new environment and ensure all integrations and automations function correctly. Cutover migrations only omit a single setting checkbox if it is known that a parallel migration is not necessary.\n\n## Section 2: Integrating GitLab with AWS CodeBuild\n\nFor those wanting to build and test code from GitLab repositories using AWS CodeBuild, this comprehensive guide will help you set up an efficient CI pipeline.\n\n### Prerequisites\n\n- GitLab.com account\n- AWS account\n- AWS CLI (configured)\n\n### Step 1: Create GitLab connection in AWS CodeStar Connections\n\n1. Log in to the AWS Management Console and navigate to the CodeBuild service.\n2. Select \"Settings\" > \"Connections\" from the left navigation panel.\n3. Click the \"Create connection\" button.\n4. Choose \"GitLab\" as the provider.\n5. Enter a connection name and click \"Connect to GitLab.\"\n6. You'll be redirected to the GitLab authentication page.\n7. Approve the necessary permissions.\n8. 
Once successful, the connection status will change to \"Available.\"\n\n![CodeStar Connect setup](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097822/Blog/Content%20Images/Blog/Content%20Images/codestar-connections-setup_aHR0cHM6_1750097822122.png)\n\n### Step 2: Create AWS CodeBuild project\n\n1. Click \"Create build project\" on the CodeBuild dashboard.\n2. Enter a project name and description.\n3. For source settings, select \"GitLab\" as the provider.\n4. Choose the connection you just created and specify the GitLab repository and branch.\n\n![Add CodeBuild project](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097822/Blog/Content%20Images/Blog/Content%20Images/codepipeline_step_3_add_codebuild_aHR0cHM6_1750097822123.png)\n\n**Note: From Step 3 forward, please configure the settings according to your specific environment and needs.**\n\n### Summary of Section 2\nThis section explained in detail how to integrate GitLab repositories with AWS CodeBuild. This setup enables a continuous integration pipeline where code changes in GitLab are automatically built and tested using AWS CodeBuild.\n\n## Section 3: Integrating GitLab with AWS CodePipeline\n\nFor those looking to implement continuous delivery from GitLab repositories using AWS CodePipeline, this detailed guide will be helpful. The integration has become even easier now that GitLab is available as an AWS CodeStar Connections provider.\n\n### Prerequisites\n\n- GitLab.com account\n- AWS account\n- AWS CLI (configured)\n\n### Step 1: Create GitLab connection in AWS CodeStar Connections\n\n1. Log in to the AWS Management Console and navigate to the CodePipeline service.\n2. Select \"Settings\" > \"Connections\" from the left navigation panel.\n3. Click the \"Create connection\" button.\n4. Choose \"GitLab\" as the provider.\n5. Enter a connection name and click \"Connect to GitLab.\"\n6. You'll be redirected to the GitLab authentication page.\n7. 
Approve the necessary permissions.\n8. Once successful, the connection status will change to \"Available.\"\n\n![CodeStar Connections setup](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097822/Blog/Content%20Images/Blog/Content%20Images/codestar-connections-setup_aHR0cHM6_1750097822125.png)\n\n### Step 2: Create AWS CodePipeline\n\n1. Click \"Create pipeline\" on the CodePipeline dashboard.\n2. Enter a pipeline name and click \"Next.\"\n3. Select \"GitLab\" as the source provider.\n4. Choose the connection you just created and specify the GitLab repository and branch.\n5. Select the Trigger type: You can trigger CodePipeline pipeline execution based on either pull or push events against specific branches and file types within your repository.\n\n![Add source provider](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097822/Blog/Content%20Images/Blog/Content%20Images/codepipeline_step_2_source_provider_aHR0cHM6_1750097822127.png)\n\n![Add source configuration](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097822/Blog/Content%20Images/Blog/Content%20Images/codepipeline_step_2_source_configured_aHR0cHM6_1750097822129.png)\n\n**Note: From Step 3 forward, please configure the settings according to your specific environment and needs.**\n\n### Summary of Section 3\nThis section detailed how to integrate GitLab repositories with AWS CodePipeline. This setup enables a continuous delivery pipeline where code changes in GitLab are automatically deployed to your AWS environment.\n\n## Section 4: Migrating to GitLab\n\nIntegrating GitLab with AWS unlocks powerful capabilities for streamlining your development and deployment workflows and helps to solve your source code management woes. 
This integration can be achieved in several ways, each offering unique benefits:\n\n- Using AWS CodeStar Connections to link GitLab with AWS services enables a more cohesive workflow by allowing external Git repositories, like GitLab, to connect with various AWS services. This setup supports automated builds, deployments, and other essential actions directly from your GitLab repository, making your development process more integrated and streamlined.\n\n- Connecting GitLab with AWS CodePipeline via AWS CodeStar Connections takes automation to the next level by allowing you to create a full CI/CD pipeline. This approach integrates GitLab with AWS CodePipeline, enabling you to automate the entire process – from source control and builds to testing and deployment – using AWS services like CodeBuild and CodeDeploy. This ensures a robust, scalable, and efficient delivery process.\n\n![Chart of new technology and solutions for using GitLab and AWS together](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097822/Blog/Content%20Images/Blog/Content%20Images/Announcing_New_Technology_and_Solutions_for_using_GitLab_and_AWS_Together_aHR0cHM6_1750097822130.png)\n\n1\\. Connecting GitLab with AWS services using AWS CodeStar Connections\n\nAWS CodeStar Connections is a service that allows you to connect external Git repositories (such as GitHub or Bitbucket) to AWS services. You can also connect GitLab to AWS services via CodeStar Connections. When using GitLab, you may need to set up a custom connection as an HTTP Git server.\nThe following AWS services can be connected to GitLab using this method:\n\n- **AWS Service Catalog**\n\nAWS Service Catalog helps organizations standardize and manage AWS resources. Integrating it with GitLab improves transparency in resource management and simplifies change tracking. 
Specifically, you can automate catalog updates based on GitLab commits, enhancing operational efficiency.\n\n- **AWS CodeBuild**\n\nAWS CodeBuild is a managed build service that compiles source code, runs tests, and produces deployable software packages. Integrating GitLab with CodeBuild allows automated build processes to start whenever code changes are pushed to GitLab. This ensures consistency in builds and facilitates easier collaboration and version control.\n\n- **AWS Glue Notebook Jobs**\n\nAWS Glue Notebook Jobs is a service that allows you to interactively develop and run data preparation and ETL (Extract, Transform, Load) tasks. Integrating GitLab with Glue Notebook Jobs enables version control for notebooks and ETL scripts, promotes collaboration among team members, and improves the quality management of data processing pipelines.\n\n- **AWS Proton**\n\nAWS Proton is a service that automates the development and deployment of microservices and serverless applications. By integrating GitLab with AWS Proton, you can manage infrastructure as code, automate deployments, and ensure consistent environment management, leading to more efficient development processes.\n\nAs AWS CodeStar Connections supports more services, connecting GitLab with additional AWS services will become easier. It's advisable to regularly check for new services that support CodeStar Connections.\n\n2\\. Connecting CodePipeline with GitLab via AWS CodeStar Connections (including CodeDeploy)\n\nAWS CodePipeline is a continuous delivery service that automates the release process for software. To connect GitLab with CodePipeline, you need to use AWS CodeStar Connections. 
This setup allows you to designate a GitLab repository as the source and automate the entire CI/CD pipeline.\nThe primary actions supported by CodePipeline include:\n- **Source control:** AWS CodeCommit, GitHub, Bitbucket, GitLab\n- **Build and test:** AWS CodeBuild, Jenkins\n- **Deploy:** AWS CodeDeploy, Elastic Beanstalk, ECS, S3\n- **Approval:** Manual approval\n- **Infrastructure management:** AWS CloudFormation\n- **Serverless:** AWS Lambda\n- **Testing:** AWS Device Farm\n- **Custom Actions:** AWS Step Functions\n\nBy integrating GitLab with CodePipeline, you can automatically trigger the pipeline whenever code changes are pushed to GitLab, allowing a consistent process from build to deployment. Additionally, combining this with GitLab's version control capabilities makes it easier to track deployment history and states, leading to more flexible and reliable software delivery.\n\n## What you've learned\nThis guide has provided comprehensive information on migrating to and integrating GitLab with AWS. 
Through the four main topics, we've covered:\n- Parallel migration to GitLab: How to gradually migrate from existing AWS-hosted repositories to GitLab.com while minimizing risks.\n- Integration with AWS CodeBuild: Steps to set up a powerful CI environment integrated with GitLab repositories.\n- Integration with AWS CodePipeline: How to build efficient continuous delivery pipelines using GitLab repositories.\n- Downstream integrations for CodePipeline and CodeStar Connections: Leveraging GitLab-AWS connections for widespread service access, unlocking a cascade of integration possibilities across the AWS ecosystem.\n\nAs every organization's code hosting and integration implementation strategy is unique, this tutorial may be used as a starting point for your own GitLab + AWS integration and implementation strategy.\n\n## Additional resources\n\nFor more detailed information and advanced configurations, refer to the following resources:\n\n- [GitLab documentation](https://docs.gitlab.com/)\n- [AWS CodeBuild User Guide](https://docs.aws.amazon.com/codebuild/latest/userguide/welcome.html)\n- [AWS CodePipeline User Guide](https://docs.aws.amazon.com/codepipeline/latest/userguide/welcome.html)\n- [GitLab CI/CD documentation](https://docs.gitlab.com/ee/ci/)\n- [Integrate with AWS](https://docs.gitlab.com/ee/solutions/cloud/aws/gitlab_aws_integration.html)\n\nIf you have questions or need support, please contact [GitLab Support](https://about.gitlab.com/support/) or AWS Support. 
We hope this comprehensive guide helps you in your AWS-GitLab integration journey.",[109,805,9,719,930,717,233],{"slug":4052,"featured":91,"template":698},"ultimate-guide-to-migrating-from-aws-codecommit-to-gitlab","content:en-us:blog:ultimate-guide-to-migrating-from-aws-codecommit-to-gitlab.yml","Ultimate Guide To Migrating From Aws Codecommit To Gitlab","en-us/blog/ultimate-guide-to-migrating-from-aws-codecommit-to-gitlab.yml","en-us/blog/ultimate-guide-to-migrating-from-aws-codecommit-to-gitlab",{"_path":4058,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4059,"content":4065,"config":4070,"_id":4072,"_type":14,"title":4073,"_source":16,"_file":4074,"_stem":4075,"_extension":19},"/en-us/blog/unlocking-agile-excellence-gitlab-epics-for-seamless-portfolio-management",{"title":4060,"description":4061,"ogTitle":4060,"ogDescription":4061,"noIndex":6,"ogImage":4062,"ogUrl":4063,"ogSiteName":685,"ogType":686,"canonicalUrls":4063,"schema":4064},"Unlocking Agile excellence: GitLab Epics for seamless portfolio management","Explore how GitLab's multi-level epics revolutionize Agile portfolio management, offering a structured yet flexible approach for strategic planning and efficient execution.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098713/Blog/Hero%20Images/Blog/Hero%20Images/agile_agile.png_1750098713577.png","https://about.gitlab.com/blog/unlocking-agile-excellence-gitlab-epics-for-seamless-portfolio-management","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Unlocking Agile excellence: GitLab Epics for seamless portfolio management\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Amanda Rueda\"},{\"@type\":\"Person\",\"name\":\"Melissa Ushakov\"}],\n        \"datePublished\": \"2024-02-06\",\n      
}",{"title":4060,"description":4061,"authors":4066,"heroImage":4062,"date":4067,"body":4068,"category":1053,"tags":4069},[1050,1720],"2024-02-06","Effective portfolio management is crucial for the success of organizations in the ever-evolving landscape of software development. Placing the right bets, optimally allocating resources, and mitigating risks are all required for effectively executing on your business strategy.\n\nGitLab's portfolio management capabilities offer a structured yet flexible approach for connecting strategy to execution. In this blog post, we'll explore the advantages of GitLab's multi-level epics and their pivotal role in Agile portfolio management.\n\n## Understanding GitLab's multi-level epics\nGitLab's multi-level epics allow organizations to organize their work in a hierarchical structure, providing a clear and detailed view of projects and their interdependencies so that teams can make informed decisions, anticipate potential challenges, and streamline their workflows for enhanced efficiency and successful project delivery. Unlike other tools, GitLab allows nesting epics up to seven layers deep across different groups and projects, enabling efficient cross-functional coordination.\n\n![epics portfolio management - image 1](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098728/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750098727863.png)\n\n## Advantages of multi-level epics in portfolio management\n\nThe following are four advantages of multi-level epics in portfolio management.\n\n### 1. Flexible compatibility with Agile frameworks\n\nEpics seamlessly adapt to various scaled Agile frameworks, allowing teams to adjust GitLab to your preferred way of working. Multi-level epics offer a flexible construct to represent different higher-level planning work items with minimal configuration required. 
This adaptability means teams can efficiently use GitLab for product planning workflows without the need for extensive setup so that they can focus more of their time on strategic planning and less on configuring tools.\n\n### 2. Granular work breakdown\n\nGitLab's multi-level epics enable organizations to break down intricate projects into smaller, manageable components so that teams can identify smaller iterations, allowing for the faster and more frequent delivery of tangible user value. Higher-level epics provide a strategic overview that can span multiple years, while epics closer to deliverables can typically be completed in a handful of sprints. Epics can be broken down into issues and tasks to tie strategic objectives to more tactical deliverables.\n\n![epics portfolio management - image 2](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098728/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750098727864.png)\n\n### 3. Real-time visibility and collaboration\n\nGitLab epics offer real-time visibility into progress, promoting effective team collaboration. GitLab offers unparalleled traceability, with automatic epic progress updates based on downstream DevSecOps activity for connected work items, allowing stakeholders to make informed decisions based on the most up-to-date information, allocate resources effectively, and maintain a proactive approach to managing product timelines.\n\n![epics portfolio management - image 3](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098728/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750098727866.png)\n\n### 4. Cross-team planning\n\nMulti-level epics facilitate efficient planning across multiple teams by providing a centralized view of work breakdown and dependencies from teams across your organization, ensuring cohesive collaboration and alignment of efforts. 
All information for your organization's Agile planning processes and execution of the work lives within a single tool, providing the shared context of how a team's work relates to higher-level strategy.\n\n![epics portfolio management - image 4](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098728/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098727871.png)\n\nGitLab’s portfolio management tools, including multi-level epics, ensure that strategic planning aligns seamlessly with the execution of projects, allowing teams to navigate the complexities of software development with precision, efficiency, and a clear focus on overarching business goals.\n\nReady to harness the full potential of GitLab's multi-level epics and elevate your portfolio management experience? Explore our subscription options on our [pricing page](https://about.gitlab.com/pricing/), and unlock a host of powerful features designed to fuel collaboration, enhance transparency, and propel your projects toward success.",[908,951,9],{"slug":4071,"featured":91,"template":698},"unlocking-agile-excellence-gitlab-epics-for-seamless-portfolio-management","content:en-us:blog:unlocking-agile-excellence-gitlab-epics-for-seamless-portfolio-management.yml","Unlocking Agile Excellence Gitlab Epics For Seamless Portfolio Management","en-us/blog/unlocking-agile-excellence-gitlab-epics-for-seamless-portfolio-management.yml","en-us/blog/unlocking-agile-excellence-gitlab-epics-for-seamless-portfolio-management",{"_path":4077,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4078,"content":4083,"config":4088,"_id":4090,"_type":14,"title":4091,"_source":16,"_file":4092,"_stem":4093,"_extension":19},"/en-us/blog/unveiling-a-new-epic-experience-for-improved-agile-planning",{"title":4079,"description":4080,"ogTitle":4079,"ogDescription":4080,"noIndex":6,"ogImage":1735,"ogUrl":4081,"ogSiteName":685,"ogType":686,"canonicalUrls":4081,"schema":4082},"Unveiling a new epic experience for 
improved Agile planning","Explore the update for GitLab epics that enhances planning and improves workflows – all with seamless migration for better project management.","https://about.gitlab.com/blog/unveiling-a-new-epic-experience-for-improved-agile-planning","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Unveiling a new epic experience for improved Agile planning\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Amanda Rueda\"}],\n        \"datePublished\": \"2024-07-03\",\n      }",{"title":4079,"description":4080,"authors":4084,"heroImage":1735,"date":4085,"body":4086,"category":1053,"tags":4087},[1050],"2024-07-03","In our ongoing journey to enhance the Agile planning experience in GitLab, we [recently unveiled a new look](https://about.gitlab.com/blog/first-look-the-new-agile-planning-experience-in-gitlab/). This update marks a significant step toward creating a unified and flexible planning tool tailored to your needs. This article explores a crucial part of that initiative: the new epic experience. You'll learn about upcoming epic features and the motivations behind these changes, which are designed to elevate your project management capabilities.\n\n## Why the new epic experience?\n\n### Addressing user feedback\nAs part of our mission to provide a comprehensive Agile planning experience, we've listened closely to your feedback. Users have highlighted challenges with the current epic implementation, such as inconsistent features between epics and issues and a lack of flexibility to support diverse workflows. Some pain points focused on workflow tools, including the absence of assignees on epics and a lack of reusable templates. The new epic experience addresses these pain points and makes Agile planning more intuitive and efficient.\n\n### Unified Work Items framework\nTo tackle these issues, we've introduced a unified Work Items framework. 
This new architecture ensures consistency across all planning objects — epics, issues, and tasks — simplifying the user experience and enhancing functionality. By consolidating the underlying code, we can deliver new features and improvements faster, ensuring a smoother and more reliable planning process.\n\n> Read more about [what is to come with GitLab Agile planning](https://about.gitlab.com/blog/first-look-the-new-agile-planning-experience-in-gitlab/).\n\n## Key features of the new epic experience\n\n### Enhanced detail page\nOne of the most notable changes is the revamped epic detail page. The new design offers a cleaner, more intuitive interface, making it easier to manage and track your epics.\n\nHere are some new key features:\n* **Assignees** - assign epics to team members, improving accountability and oversight.\n* **Health status** - quickly gauge the status of your epics with new health indicators.\n* **Time tracking** - create better visibility over time spent and ensure efficient use of resources across your projects.\n* **Ancestry** - view the entire hierarchy lineage of the epic.\n* **Condensed description** - easily view long work item descriptions without having to scroll excessively. Descriptions are truncated by default, with a \"Show more\" link to expand the full text on demand. This streamlines your workflow by allowing you to quickly scan descriptions and only expand them when needed, reducing clutter and improving readability.\n* **Custom color** - customize the color related to epics viewed on the roadmap now with the ability to define a custom color, use HEX or RGB codes, or choose from an expanded predefined palette. \n\n![new epic experience screenshot](https://res.cloudinary.com/about-gitlab-com/image/upload/v1749674437/Blog/Content%20Images/Screenshot_2024-07-10_at_4.22.45_p.m..png)\n\n### Consistency across planning objects\nThe new epic experience aligns closely with the new issues experience coming soon (spoiler alert!) 
and tasks, providing a seamless and cohesive user experience. This consistency helps streamline workflows and reduces the learning curve for new users.\n\n### Additional functionality\nWe plan to iteratively add exciting new features that will enhance your planning capabilities. Our goal is to allow you to tailor planning processes within GitLab to best fit your organization’s unique needs. Once we’ve released the new epics experience, you can expect to see additional functionality with every release! There are many great features to come – here are some of my favorites:\n- [Templates](https://gitlab.com/gitlab-org/gitlab/-/issues/428690)\n- [Custom fields](https://gitlab.com/groups/gitlab-org/-/epics/235)\n- [Configurable statuses](https://gitlab.com/groups/gitlab-org/-/epics/5099)\n- [Project-level epics](https://gitlab.com/gitlab-org/gitlab/-/issues/31840)\n- [Cloning](https://gitlab.com/gitlab-org/gitlab/-/issues/339768)\n- [Moving to another group/project](https://gitlab.com/gitlab-org/gitlab/-/issues/339766)\n- [Milestones](https://gitlab.com/groups/gitlab-org/-/epics/329)\n\n## Migration expectations\nWe understand that any change can be disruptive, so we've designed the migration to the new epic experience to be as seamless as possible. All existing epic data, APIs, and URLs will continue to function as expected. Users do not need to take any action to prepare for this transition. For our self-managed customers, learn how you can preview the new experience in a test environment ahead of general availability [here](https://docs.gitlab.com/ee/user/group/epics/epic_work_items.html).\n\n## Community feedback and engagement\nWe value your input and encourage you to share your experiences with the new epic experience. Your feedback is essential to help refine and improve our tools. 
Please visit our [epic experience feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/494462) to provide your thoughts and suggestions.\n\n## What's next\nThe new epic experience in GitLab represents a significant leap forward in our Agile planning capabilities. With enhanced features, improved consistency, and a user-centric approach, we are confident that these changes will greatly benefit your project management processes. We invite you to explore the new features, provide feedback, and stay tuned for more updates as we continue to innovate and improve.\n\n> [Bookmark this page](https://about.gitlab.com/blog/categories/agile-planning/) to keep up with our Agile planning news.",[908,9,695,717,888],{"slug":4089,"featured":6,"template":698},"unveiling-a-new-epic-experience-for-improved-agile-planning","content:en-us:blog:unveiling-a-new-epic-experience-for-improved-agile-planning.yml","Unveiling A New Epic Experience For Improved Agile Planning","en-us/blog/unveiling-a-new-epic-experience-for-improved-agile-planning.yml","en-us/blog/unveiling-a-new-epic-experience-for-improved-agile-planning",{"_path":4095,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4096,"content":4101,"config":4107,"_id":4109,"_type":14,"title":4110,"_source":16,"_file":4111,"_stem":4112,"_extension":19},"/en-us/blog/unveiling-the-guard-framework-to-automate-security-detections-at-gitlab",{"title":4097,"description":4098,"ogTitle":4097,"ogDescription":4098,"noIndex":6,"ogImage":2977,"ogUrl":4099,"ogSiteName":685,"ogType":686,"canonicalUrls":4099,"schema":4100},"Unveiling the GUARD framework to automate security detections at GitLab","The GitLab Universal Automated Response and Detection (GUARD) framework spans creation, maintenance, alert routing and handling, rich metrics collection, and more.","https://about.gitlab.com/blog/unveiling-the-guard-framework-to-automate-security-detections-at-gitlab","\n                        {\n        \"@context\": \"https://schema.org\",\n      
  \"@type\": \"Article\",\n        \"headline\": \"Unveiling the GUARD framework to automate security detections at GitLab\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Harjeet Sharma\"},{\"@type\":\"Person\",\"name\":\"Valentine Mairet\"},{\"@type\":\"Person\",\"name\":\"Matt Coons\"}],\n        \"datePublished\": \"2024-11-26\",\n      }",{"title":4097,"description":4098,"authors":4102,"heroImage":2977,"date":4104,"body":4105,"category":693,"tags":4106,"updatedDate":4104},[4103,2646,2843],"Harjeet Sharma","2024-11-26","[GitLab Security Operations](https://handbook.gitlab.com/handbook/security/security-operations/) leverages automation as a guiding principle to ensure our security engineers have the time to focus on what matters, not manual mundane tasks that can be standardized and automated. We applied this principle to securing the GitLab.com SaaS platform, which generates terabytes of log data daily and requires the GitLab Security team to standardize, automate, and scale security workflows for enhanced protection and efficiency. The result: a new framework we call GitLab Universal Automated Detection and Response, or GUARD – a collaboration between the GitLab [Security Incident Response Team (SIRT)](https://handbook.gitlab.com/handbook/security/security-operations/sirt/) and the [Signals Engineering Team](https://handbook.gitlab.com/handbook/security/security-operations/signals-engineering/).\n\nGUARD covers all aspects of security detection, including:\n* creation\n* maintenance\n* alert routing and handling\n* rich metrics collection\n* alert closure or incident escalation workflow \n\n## The goals of GUARD  \n\nGUARD was created and designed with a set of key goals: \n\n1. **Standardization of SIRT’s detection and alerting pipeline** to produce high-quality detections using a peer reviewed and automation-first focus  \n2. **Reduction of alert fatigue** through alert consolidation, deduplication, risk scoring, and automated feedback  \n3. 
**Metrics** to measure response efficiency and identify problems early  \n4. **GitLab at the core** by leveraging GitLab as a single source of truth for detection definitions\n\n## GUARD's design principles\n\nGUARD was created out of necessity, with a clear vision of the intended state. Before GUARD, detections did not follow a standard format, alerting metrics were not available, and detection creation and maintenance were ad-hoc. Building a framework that was scalable, GitLab-centric, and able to automate manual tasks was core to the success of GUARD. Due to time efficiencies realized by GUARD, SecOps engineers have more time to solve difficult problems and handle complex incidents. \n\n## GUARD components \n\nThe GUARD framework consists of multiple modules. At the center of GUARD is the GitLab platform itself, acting as a single source of truth for detection rules and providing SIRT the ability to automatically deploy detections as code using GitLab CI/CD. \n\nGUARD includes the following components: \n\n- Detection as Code (DaC) - Deploys detections through the GitLab CI/CD pipeline.  \n- User Attestation Module - Allows GitLab team members to attest to activities flagged as potentially malicious.  \n- Enrichments - Polling historical and contextual information to enrich alerts to make alert triage easier.  \n- Alert Triage and Response - Providing a standard alert triage format and templated escalation actions.  \n- Metrics Generation - Gathering insights on alert handling. \n\nEach GUARD module works together to standardize, automate, and iteratively improve GitLab’s security detections and alerting pipeline. \n\n## GitLab at the core\n\nGitLab is core to critical components of GUARD, acting as a single source for threat detections, automating GUARD’s DaC pipeline through GitLab CI/CD, and acting as a “front end” for GUARD, through which security engineers can add, edit and delete threat detections. 
\n\nHow GitLab features use GUARD: \n\n- [GitLab projects](https://docs.gitlab.com/ee/user/get_started/get_started_projects.html): GUARD utilizes a GitLab project repository as the single source for GUARD threat detections, stored in JSON format.   \n- [GitLab MRs](https://docs.gitlab.com/ee/user/project/merge_requests/): Any changes to GUARD detections, including new detections utilize GitLab MRs against the main GUARD project. A detailed MR template is utilized in which we validate and record details about the detection being added, edited, or deleted. MR approval rules, including the use of CodeOwners and protected branches, are used to ensure proper detection reviews are completed before merging.   \n- [GitLab issues](https://docs.gitlab.com/ee/user/project/issues/): Bug submissions or other engineering efforts related to GUARD are recorded in GitLab issues.   \n- [GitLab labels](https://docs.gitlab.com/ee/user/project/labels.html): A set of standardized labels ensure security engineers document GUARD changes in a way that is easy to track.   \n- [GitLab CI/CD](https://docs.gitlab.com/ee/ci/): GUARD uses a GitLab CI pipeline to automate the deployment of new/changed/deleted detections to GitLab’s security incident and event management (SIEM). GUARD’s CI pipeline performs a number of validation, testing, and quality checks before successfully passing the pipeline and committing the changes to GitLab’s SIEM platform. \n\n## Metrics generation\n\nInteractions with the alert handling UI are recorded to generate key performance metrics, such as Time to Respond, Time to Resolve, and insights into alerts like true/false positive rates. Additional metadata collected includes an emoji-based sentiment analysis. Engineers handling alerts provide ‘feedback’ about the alerts handled in the form of emojis, so we can take that feedback into account upon iterating on detection rules. 
\n\nAlert handling metrics are stored in a separate database to create visualizations consulted by engineers and management. These are key to understanding team performance in alert resolution and alert fidelity so that we can always improve.  \n\n## Iterate with us\n\nUsing GitLab as a single source of truth for threat detection code allowed GUARD to extract processes from a specific SIEM technology, supporting greater flexibility, ease of use, modularization, and auditability. \n\n[Iteration](https://handbook.gitlab.com/handbook/values/#iteration) is a core GitLab value – we start with the smallest valuable thing to get fast feedback and efficiently reach a desired end goal. GUARD is no different, and we hope sharing GUARD will help readers iterate towards their own automation improvements. \n\n*This article is the first in a series on GitLab GUARD. Next, we will share details about various aspects of our iterative journey to implement GUARD at GitLab.*",[693,695,9],{"slug":4108,"featured":91,"template":698},"unveiling-the-guard-framework-to-automate-security-detections-at-gitlab","content:en-us:blog:unveiling-the-guard-framework-to-automate-security-detections-at-gitlab.yml","Unveiling The Guard Framework To Automate Security Detections At Gitlab","en-us/blog/unveiling-the-guard-framework-to-automate-security-detections-at-gitlab.yml","en-us/blog/unveiling-the-guard-framework-to-automate-security-detections-at-gitlab",{"_path":4114,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4115,"content":4121,"config":4126,"_id":4128,"_type":14,"title":4129,"_source":16,"_file":4130,"_stem":4131,"_extension":19},"/en-us/blog/use-gitlab-duo-workflow-to-improve-application-quality-assurance",{"title":4116,"description":4117,"ogTitle":4116,"ogDescription":4117,"noIndex":6,"ogImage":4118,"ogUrl":4119,"ogSiteName":685,"ogType":686,"canonicalUrls":4119,"schema":4120},"Use GitLab Duo Workflow to improve application quality assurance","Learn step-by-step how to add unit tests 
to a Java application using agentic AI (includes a video tutorial).","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097617/Blog/Hero%20Images/Blog/Hero%20Images/Workflow%201800x945_2gQoQIbY9NvjLFpXtsxtXy_1750097616649.png","https://about.gitlab.com/blog/use-gitlab-duo-workflow-to-improve-application-quality-assurance","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Use GitLab Duo Workflow to improve application quality assurance\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Cesar Saavedra\"}],\n        \"datePublished\": \"2025-04-10\",\n      }",{"title":4116,"description":4117,"authors":4122,"heroImage":4118,"date":4123,"body":4124,"category":801,"tags":4125},[798],"2025-04-10","Assuring the quality of your applications via test-driven design, good code\ncoverage, and issue detection is critically important to your customers and\nyour reputation, but it can also be a time-consuming endeavor. [GitLab Duo\nWorkflow](https://about.gitlab.com/gitlab-duo/agent-platform/), agentic AI built\non top of the most comprehensive DevSecOps platform, can help you quickly\ncomplete development tasks such as adding unit tests to a Java application.\nThis tutorial demonstrates how by using this sample [Java\nproject](https://gitlab.com/gitlab-da/playground/csaavedra/gdw/prodmgr-gdw).\n\n\n> GitLab Duo Workflow is currently in private beta. Join the\n[waitlist](https://about.gitlab.com/gitlab-duo/agent-platform/) to see what’s\npossible with AI agents that understand your entire SDLC.\n\n\n## Opening your project in VS Code\n\n\n1. Open the Java project in Visual Studio Code (after cloning it to your\nlocal machine). Ensure that you’re in a feature branch (not the main or\ndefault branch) before you start. If you’re already working on a merge\nrequest, it will have its own associated feature branch.\n\n\n2. (This step is optional.) 
Navigate to the file that defines the Java class\nfor which you’d like to have GitLab Duo Workflow create unit tests. Inspect\nit so that you can later confirm that the generated unit tests do cover its\nclass members. This is what you would see:\n\n\n![File that defines the Java class for which you’d like to have GitLab Duo\nWorkflow create unit\ntests](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097627/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097627482.png)\n\n\n**Note:** We are assuming that you already enabled the GitLab Duo Workflow\nextension in your VS Code. If not, please refer to the [setup\ndocumentation](https://docs.gitlab.com/user/duo_workflow/#use-workflow-in-vs-code).\n\n\n3. Launch GitLab Duo Workflow by opening the VS Code command palette [Ctrl +\nShift + P] and entering \"GitLab Duo Workflow\" in it and selecting **GitLab:\nShow Duo Workflow**. A tab will appear that looks like this:\n\n\n![Launching GitLab Duo Workflow with VS\nCode](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097628/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097627483.png)\n\n\n4. The next step is to add tests for the default constructor, the\nverification of the object creation, and the initial state of the properties\nof the Product class. To accomplish this, enter the following prompt in the\ntext area in GitLab Duo Workflow:\n\n\n```unset\n\nCreate unit tests for class defined in the Product.java file and store the\nunit tests in its own file titled ProductTest.java\n\n```\n\n\n![Prompt area in GitLab Duo\nWorkflow](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097628/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750097627484.png)\n\n\n5. Click the **Start** button in the GitLab Duo Workflow window. 
Two new\nwindows will appear: one in the center of the screen and one to the right.\nThe one on the right displays the analysis that GitLab Duo Workflow is\nperforming to come up with a plan that will achieve the goal as specified in\nyour prompt. The plan is displayed in the center window. After the analysis\nand the plan are finished, you should see an output like this:\n\n\n![Analysis and plan generated by GitLab Duo\nWorkflow](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097627/Blog/Content%20Images/Blog/Content%20Images/image8_aHR0cHM6_1750097627486.png)\n\n\n6. Review the analysis and plan and, if you are satisfied with them, click\n**Approve plan** at the bottom of the window.\n\n\n7. GitLab Duo Workflow will start executing the approved plan and making\nmodifications to your project accordingly.\n\n\n8. Once the execution of the plan is finished, you will see a new directory\n`src/test/java/csaa/jspring/ProductManager` in the project with a new file\nin it named `ProductTest.java`, which contains all the unit tests for the\n`Product.java` class.\n\n\n![New directory in the project iwth a new file name\n`ProductTest.java`](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097628/Blog/Content%20Images/Blog/Content%20Images/image7_aHR0cHM6_1750097627488.png)\n\n\n9. Navigate to the newly created file `ProductTest.java` and you will see\nthat it has some import statements underlined in red indicating some import\nerrors:\n\n\n![`ProductTest.java` include imports statement and error indicators in\nred](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097628/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097627489.png)\n\n\nLet’s have GitLab Duo Workflow fix these for us.\n\n\n**Note:** We could have also asked GitLab Duo Workflow in our first prompt\nto update the `pom.xml` file accordingly. 
But since we didn’t, let’s fix\nthese errors in a new workflow.\n\n\n## Launching a GitLab Duo Workflow to fix errors in generated code\n\n\n10. Start a new workflow by clicking on the **New workflow** button at the\nbottom of the analysis window on the right side of your screen.\n\n\n![New workflow\nbutton](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097628/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750097627491.png)\n\n\n11. In the prompt text area, enter the following:\n\n\n```unset\n\nThe file ProductTest.java has an error “The import org.junit cannot be\nresolved”. Please fix it\n\n```\n\n\n12. After you approve the proposed plan, GitLab Duo Workflow starts its\nanalysis by reading the current `pom.xml` file. It then edits it and removes\nthe outdated JUnit dependency, and follows that with the addition of the\ncorrect dependency and version for JUnit. Lastly, it reads the\n`ProductTest.java` file to clear all the dependency errors.\n\n\n![GitLab Duo Workflow carrying out analysis by reading\npom.xml](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097627/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750097627492.png)\n\n\n## Watch the tutorial\n\n\nThrough the execution of this plan, GitLab Duo Workflow is effectively\nmaking updates to the project to achieve what was requested in the prompt,\nsaving time and effort, and increasing productivity so that developers can\nspend more time innovating and creating value for their organization.\n\n\nIf you’d like to see what you read above in action, watch the following\nvideo:\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/Tuj7TgqY81Q?si=RReuL1pUsLafvAzs\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\n> Sign up for the [GitLab Duo Workflow private beta\nwaitlist](https://about.gitlab.com/gitlab-duo/agent-platform/) 
to see what’s\npossible with AI agents that understand your entire SDLC.\n\n\n## Read more about GitLab Duo Workflow and agentic AI\n\n\n- [GitLab Duo Workflow: Enterprise visibility and control for agentic\nAI](https://about.gitlab.com/blog/gitlab-duo-workflow-enterprise-visibility-and-control-for-agentic-ai/)\n\n- [GitLab Duo Workflow\ndocumentation](https://docs.gitlab.com/user/duo_workflow/)\n\n- [GitLab Duo](https://about.gitlab.com/gitlab-duo/)\n\n- [Agentic AI: Unlocking developer potential at scale (The\nSource)](https://about.gitlab.com/the-source/ai/agentic-ai-unlocking-developer-potential-at-scale/)\n",[803,9,719,717,695],{"slug":4127,"featured":6,"template":698},"use-gitlab-duo-workflow-to-improve-application-quality-assurance","content:en-us:blog:use-gitlab-duo-workflow-to-improve-application-quality-assurance.yml","Use Gitlab Duo Workflow To Improve Application Quality Assurance","en-us/blog/use-gitlab-duo-workflow-to-improve-application-quality-assurance.yml","en-us/blog/use-gitlab-duo-workflow-to-improve-application-quality-assurance",{"_path":4133,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4134,"content":4140,"config":4146,"_id":4148,"_type":14,"title":4149,"_source":16,"_file":4150,"_stem":4151,"_extension":19},"/en-us/blog/using-child-pipelines-to-continuously-deploy-to-five-environments",{"title":4135,"description":4136,"ogTitle":4135,"ogDescription":4136,"noIndex":6,"ogImage":4137,"ogUrl":4138,"ogSiteName":685,"ogType":686,"canonicalUrls":4138,"schema":4139},"Using child pipelines to continuously deploy to five environments","Learn how to manage continuous deployment to multiple environments, including temporary, on-the-fly sandboxes, with a minimalist GitLab 
workflow.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097012/Blog/Hero%20Images/Blog/Hero%20Images/AdobeStock_397632156_3Ldy1urjMStQCl4qnOBvE0_1750097011626.jpg","https://about.gitlab.com/blog/using-child-pipelines-to-continuously-deploy-to-five-environments","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Using child pipelines to continuously deploy to five environments\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Olivier Dupré\"}],\n        \"datePublished\": \"2024-09-26\",\n      }",{"title":4135,"description":4136,"authors":4141,"heroImage":4137,"date":4143,"body":4144,"category":928,"tags":4145},[4142],"Olivier Dupré","2024-09-26","DevSecOps teams sometimes require the ability to manage continuous\ndeployment across multiple environments — and they need to do so without\nchanging their workflows. The [GitLab DevSecOps\nplatform](https://about.gitlab.com/) supports this need, including\ntemporary, on-the-fly sandboxes, with a minimalist approach. In this\narticle, you'll learn how to run continuous deployment of infrastructure\nusing Terraform, over multiple environments.\n\n\nThis strategy can easily be applied to any project, whether it is\ninfrastructure as code (IaC) relying on another technology, such as\n[Pulumi](https://www.pulumi.com/) or [Ansible](https://www.ansible.com/),\nsource code in any language, or a monorepo that mixes many languages.\n\n\nThe final pipeline that you will have at the end of this tutorial will\ndeploy:\n\n\n* A temporary **review** environment for each feature branch.\n\n* An **integration** environment, easy to wipe out and deployed from the\nmain branch.\n\n* A **QA** environment, also deployed from the main branch, to run quality\nassurance steps.\n\n* A **staging** environment, deployed for every tag. 
This is the last round\nbefore production.\n\n* A **production** environment, just after the staging environment. This one\nis triggered manually for demonstration, but can also be continuously\ndeployed.\n\n\n>Here is the legend for the flow charts in this article:\n\n> * Round boxes are the GitLab branches.\n\n> * Square boxes are the environments.\n\n> * Text on the arrows are the actions to flow from one box to the next.\n\n> * Angled squares are decision steps.\n\n\n\u003Cpre class=\"mermaid\">\n\nflowchart LR\n    A(main) -->|new feature| B(feature_X)\n\n    B -->|auto deploy| C[review/feature_X]\n    B -->|merge| D(main)\n    C -->|destroy| D\n\n    D -->|auto deploy| E[integration]\n    E -->|manual| F[qa]\n\n    D -->|tag| G(X.Y.Z)\n    F -->|validate| G\n\n    G -->|auto deploy| H[staging]\n    H -->|manual| I{plan}\n    I -->|manual| J[production]\n\u003C/pre>\n\n\nOn each step, you'll learn the [why](#why) and the [what](#what) before\nmoving to the [how](#how). This will help you fully understand and replicate\nthis tutorial.\n\n\n## Why\n\n\n* [Continuous\nintegration](https://about.gitlab.com/topics/ci-cd/#what-is-continuous-integration-ci)\nis almost a de facto standard. Most companies have implemented CI pipelines\nor are willing to standardize their practice.\n\n\n* [Continuous\ndelivery](https://about.gitlab.com/topics/ci-cd/#what-is-continuous-delivery-cd),\nwhich pushes artifacts to a repository or registry at the end of the CI\npipeline, is also popular.\n\n\n* Continuous deployment, which goes further and deploys these artifacts\nautomatically, is less widespread. When it has been implemented, we see it\nessentially in the application field. When discussing continuously\ndeploying  infrastructure, the picture seems less obvious, and is more about\nmanaging several environments. In contrast, testing, securing, and verifying\nthe infrastructure's code seems more challenging. 
And this is one of the\nfields where DevOps has not yet reached its maturity. One of the other\nfields is to shift security left, integrating security teams and, more\nimportantly, security concerns, earlier in the delivery lifecycle, to\nupgrade from DevOps to ***DevSecOps***.\n\n\nGiven this high-level picture, in this tutorial, you will work toward a\nsimple, yet efficient way to implement DevSecOps for your infrastructure\nthrough the example of deploying resources to five environments, gradually\nprogressing from development to production.\n\n\n__Note:__ Even if I advocate embracing a FinOps approach and reducing the\nnumber of environments, sometimes there are excellent reasons to maintain\nmore than just dev, staging, and production. So, please, adapt the examples\nbelow to match your needs.\n\n\n## What\n\n\nThe rise of cloud technology has driven the usage of IaC. Ansible and\nTerraform were among the first to pave the road here. OpenTofu, Pulumi, AWS\nCDK, Google Deploy Manager, and many others joined the party.\n\n\nDefining IaC is a perfect solution to feel safe when deploying\ninfrastructure. You can test it, deploy it, and replay it again and again\nuntil you reach your goal.\n\n\nUnfortunately, we often see companies maintain several branches, or even\nrepositories, for each of their target environments. And this is where the\nproblems start. They are no longer enforcing a process. They are no longer\nensuring that any change in the production code base has been accurately\ntested in previous environments. And they start seeing drifts from one\nenvironment to the other.\n\n\nI realized this tutorial was necessary when, at a conference I attended,\nevery participant said they do not have a workflow that enforces the\ninfrastructure to be tested thoroughly before being deployed to production.\nAnd they all agreed that sometimes they patch the code directly in\nproduction. Sure, this is fast, but is it safe? 
How do you report back to\nprevious environments? How do you ensure there are no side effects? How do\nyou control whether you are putting your company at risk with new\nvulnerabilities being pushed too quickly in production?\n\n\nThe question of *why* DevOps teams deploy directly to production is critical\nhere. Is it because the pipeline could be more efficient or faster? Is there\nno automation? Or, even worse, because there is *no way to test accurately\noutside of production*?\n\n\nIn the next section, you will learn how to implement automation for your\ninfrastructure and ensure that your DevOps team can effectively test what\nyou are doing before pushing to any environment impacting others. You will\nsee how your code is secured and its deployment is controlled, end-to-end.\n\n\n## How\n\n\nAs mentioned earlier, there are many IaC languages out there nowadays and we\nobjectively cannot cover *all* of them in a single article. So, I will rely\non a basic Terraform code running on Version 1.4. Please do not focus on the\nIaC language itself but instead on the process that you could apply to your\nown ecosystem.\n\n\n### The Terraform code\n\n\nLet's start with a fundamental Terraform code.\n\n\nWe are going to deploy to AWS, a virtual private cloud (VPC), which is a\nvirtual network. In that VPC, we will deploy a public and a private subnet.\nAs their name implies, they are subnets of the main VPC. Finally, we will\nadd an Elastic Cloud Compute (EC2) instance (a virtual machine) in the\npublic subnet.\n\n\nThis demonstrates the deployment of four resources without adding too much\ncomplexity. 
The idea is to focus on the pipeline, not the code.\n\n\nHere is the target we want to reach for your repository.\n\n\n![target for\nrepository](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097033/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750097033415.png)\n\n\nLet’s do it step by step.\n\n\nFirst, we declare all resources in a `terraform/main.tf` file:\n\n\n```terraform\n\nprovider \"aws\" {\n  region = var.aws_default_region\n}\n\n\nresource \"aws_vpc\" \"main\" {\n  cidr_block = var.aws_vpc_cidr\n\n  tags = {\n    Name     = var.aws_resources_name\n  }\n}\n\n\nresource \"aws_subnet\" \"public_subnet\" {\n  vpc_id     = aws_vpc.main.id\n  cidr_block = var.aws_public_subnet_cidr\n\n  tags = {\n    Name = \"Public Subnet\"\n  }\n}\n\nresource \"aws_subnet\" \"private_subnet\" {\n  vpc_id     = aws_vpc.main.id\n  cidr_block = var.aws_private_subnet_cidr\n\n  tags = {\n    Name = \"Private Subnet\"\n  }\n}\n\n\nresource \"aws_instance\" \"sandbox\" {\n  ami           = var.aws_ami_id\n  instance_type = var.aws_instance_type\n\n  subnet_id = aws_subnet.public_subnet.id\n\n  tags = {\n    Name     = var.aws_resources_name\n  }\n}\n\n```\n\n\nAs you can see, there are a couple of variables that are needed for this\ncode, so let's declare them in a `terraform/variables.tf` file:\n\n\n```terraform\n\nvariable \"aws_ami_id\" {\n  description = \"The AMI ID of the image being deployed.\"\n  type        = string\n}\n\n\nvariable \"aws_instance_type\" {\n  description = \"The instance type of the VM being deployed.\"\n  type        = string\n  default     = \"t2.micro\"\n}\n\n\nvariable \"aws_vpc_cidr\" {\n  description = \"The CIDR of the VPC.\"\n  type        = string\n  default     = \"10.0.0.0/16\"\n}\n\n\nvariable \"aws_public_subnet_cidr\" {\n  description = \"The CIDR of the public subnet.\"\n  type        = string\n  default     = \"10.0.1.0/24\"\n}\n\n\nvariable \"aws_private_subnet_cidr\" {\n  description = \"The CIDR of the 
private subnet.\"\n  type        = string\n  default     = \"10.0.2.0/24\"\n}\n\n\nvariable \"aws_default_region\" {\n  description = \"Default region where resources are deployed.\"\n  type        = string\n  default     = \"eu-west-3\"\n}\n\n\nvariable \"aws_resources_name\" {\n  description = \"Default name for the resources.\"\n  type        = string\n  default     = \"demo\"\n}\n\n```\n\n\nAlready, we are almost good to go on the IaC side. What's missing is a way\nto share the Terraform states. For those who don't know, Terraform works\nschematically doing the following:\n\n\n* `plan` checks the differences between the current state of the\ninfrastructure and what is defined in the code. Then, it outputs the\ndifferences.\n\n* `apply` applies the differences in the `plan` and updates the state.\n\n\nFirst round, the state is empty, then it is filled with the details (ID,\netc.) of the resources applied by Terraform.\n\n\nThe problem is: Where is that state stored? How do we share it so several\ndevelopers can collaborate on code?\n\n\nThe solution is fairly simple: Leverage GitLab to store and share the state\nfor you through a [Terraform HTTP\nbackend](https://docs.gitlab.com/ee/user/infrastructure/iac/terraform_state.html).\n\n\nThe first step in using this backend is to create the most simple\n`terraform/backend.tf` file. The second step will be handled in the\npipeline.\n\n\n```terraform\n\nterraform {\n  backend \"http\" {\n  }\n}\n\n```\n\n\nEt voilà! We have a bare minimum Terraform code to deploy these four\nresources. 
We will provide the variable values at the runtime, so let's do\nthat later.\n\n\n### The workflow\n\n\nThe workflow that we are going to implement now is the following:\n\n\n\u003Cpre class=\"mermaid\">\n\nflowchart LR\n    A(main) -->|new feature| B(feature_X)\n\n    B -->|auto deploy| C[review/feature_X]\n    B -->|merge| D(main)\n    C -->|destroy| D\n\n    D -->|auto deploy| E[integration]\n    E -->|manual| F[qa]\n\n    D -->|tag| G(X.Y.Z)\n    F -->|validate| G\n\n    G -->|auto deploy| H[staging]\n    H -->|manual| I{plan}\n    I -->|manual| J[production]\n\u003C/pre>\n\n\n1. Create a **feature** branch. This will continuously run all scanners on\nthe code to ensure that it is still compliant and secured. This code will be\ncontinuously deployed to a temporary environment `review/feature_branch`\nwith the name of the current branch. This is a safe environment where the\ndevelopers and operations teams can test their code without impacting\nanybody. This is also where we will enforce the process, like enforcing code\nreviews and running scanners, to ensure that the quality and security of the\ncode are acceptable and do not put your assets at risk. The infrastructure\ndeployed by this branch is automatically destroyed when the branch is\nclosed. This helps you keep your budget under control.\n\n\n\u003Cpre class=\"mermaid\">\n\nflowchart LR\n    A(main) -->|new feature| B(feature_X)\n\n    B -->|auto deploy| C[review/feature_X]\n    B -->|merge| D(main)\n    C -->|destroy| D\n\u003C/pre>\n\n\n2. Once approved, the feature branch will be **merged** into the main\nbranch. This is a [protected\nbranch](https://docs.gitlab.com/ee/user/project/protected_branches.html)\nwhere no one can push. This is mandatory to ensure that every change request\nto production is thoroughly tested. That branch is also continuously\ndeployed. The target here is the `integration` environment. 
To keep this\nenvironment slightly more stable, its deletion is not automated but can be\ntriggered manually.\n\n\n\u003Cpre class=\"mermaid\">\n\nflowchart LR\n    D(main) -->|auto deploy| E[integration]\n\u003C/pre>\n\n\n3. From there, manual approval is required to trigger the next deployment.\nThis will deploy the main branch to the `qa` environment. Here, I have set a\nrule to prevent deletion from the pipeline. The idea is that this\nenvironment should be quite stable (after all, it's already the third\nenvironment), and I would like to prevent deletion by mistake. Feel free to\nadapt the rules to match your processes.\n\n\n\u003Cpre class=\"mermaid\">\n\nflowchart LR\n    D(main)-->|auto deploy| E[integration]\n    E -->|manual| F[qa]\n\u003C/pre>\n\n\n4. To proceed, we will need to **tag** the code. We are relying on\n[protected\ntags](https://docs.gitlab.com/ee/user/project/protected_tags.html) here to\nensure that only a specific set of users are allowed to deploy to these last\ntwo environments. This will immediately trigger a deployment to the\n`staging` environment.\n\n\n\u003Cpre class=\"mermaid\">\n\nflowchart LR\n    D(main) -->|tag| G(X.Y.Z)\n    F[qa] -->|validate| G\n\n    G -->|auto deploy| H[staging]\n\u003C/pre>\n\n\n5. Finally, we are landing to `production`. When discussing infrastructure,\nit is often challenging to deploy progressively (10%, 25%, etc.), so we will\ndeploy the whole infrastructure. Still, we control that deployment with a\nmanual trigger of this last step. 
And to enforce maximum control on this\nhighly critical environment, we will control it as a [protected\nenvironment](https://docs.gitlab.com/ee/ci/environments/protected_environments.html).\n\n\n\u003Cpre class=\"mermaid\">\n\nflowchart LR\n    H[staging] -->|manual| I{plan}\n    I -->|manual| J[production]\n\u003C/pre>\n\n\n### The pipeline\n\n\nTo implement the above [workflow](#the-workflow), we are now going to\nimplement a pipeline with two [downstream\npipelines](https://docs.gitlab.com/ee/ci/pipelines/downstream_pipelines.html).\n\n\n#### The main pipeline\n\n\nLet's start with the main pipeline. This is the one that will be triggered\nautomatically on any **push to a feature branch**, any **merge to the\ndefault branch**, or any **tag**. *The one* that will do true **continuous\ndeployment** to the following environments: `dev`, `integration`, and\n`staging`. And it is declared in the `.gitlab-ci.yml` file at the root of\nyour project.\n\n\n![the repository\ntarget](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097033/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097033417.png)\n\n\n```yml\n\nStages:\n  - test\n  - environments\n\n.environment:\n  stage: environments\n  variables:\n    TF_ROOT: terraform\n    TF_CLI_ARGS_plan: \"-var-file=../vars/$variables_file.tfvars\"\n  trigger:\n    include: .gitlab-ci/.first-layer.gitlab-ci.yml\n    strategy: depend            # Wait for the triggered pipeline to successfully complete\n    forward:\n      yaml_variables: true      # Forward variables defined in the trigger job\n      pipeline_variables: true  # Forward manual pipeline variables and scheduled pipeline variables\n\nreview:\n  extends: .environment\n  variables:\n    environment: review/$CI_COMMIT_REF_SLUG\n    TF_STATE_NAME: $CI_COMMIT_REF_SLUG\n    variables_file: review\n    TF_VAR_aws_resources_name: $CI_COMMIT_REF_SLUG  # Used in the tag Name of the resources deployed, to easily differenciate them\n  rules:\n    - 
if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH\n\nintegration:\n  extends: .environment\n  variables:\n    environment: integration\n    TF_STATE_NAME: $environment\n    variables_file: $environment\n  rules:\n    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH\n\nstaging:\n  extends: .environment\n  variables:\n    environment: staging\n    TF_STATE_NAME: $environment\n    variables_file: $environment\n  rules:\n    - if: $CI_COMMIT_TAG\n\n#### TWEAK\n\n# This tweak is needed to display vulnerability results in the merge\nwidgets.\n\n# As soon as this issue https://gitlab.com/gitlab-org/gitlab/-/issues/439700\nis resolved, the `include` instruction below can be removed.\n\n# Until then, the SAST IaC scanners will run in the downstream pipelines,\nbut their results will not be available directly in the merge request\nwidget, making it harder to track them.\n\n# Note: This workaround is perfectly safe and will not slow down your\npipeline.\n\ninclude:\n  - template: Security/SAST-IaC.gitlab-ci.yml\n#### END TWEAK\n\n\n```\n\n\nThis pipeline runs only two stages: `test` and  `environments`. The former\nis needed for the *TWEAK* to run scanners. The later triggers a child\npipeline with a different set of variables for each case defined above (push\nto the branch, merge to the default branch, or tag).\n\n\nWe are adding here a dependency with the keyword\n[strategy:depend](https://docs.gitlab.com/ee/ci/yaml/index.html#triggerstrategy)\non our child pipeline so the pipeline view in GitLab will be updated only\nonce the deployment is finished.\n\n\nAs you can see here, we are defining a base job,\n[hidden](https://docs.gitlab.com/ee/ci/jobs/#hide-jobs), and we are\nextending it with specific variables and rules to trigger only one\ndeployment for each target environment.\n\n\nBesides the [predefined\nvariables](https://docs.gitlab.com/ee/ci/variables/predefined_variables.html),\nwe are using two new entries that we need to define:\n\n1. 
[The variables specific](#the-variable-definitions) to each environment:\n`../vars/$variables_file.tfvars`\n\n2. [The child pipeline](#the-child-pipeline), defined in\n`.gitlab-ci/.first-layer.gitlab-ci.yml`\n\n\nLet's start with the smallest part, the variable definitions.\n\n\n### The variable definitions\n\n\nWe are going here to mix two solutions to provide variables to Terraform:\n\n\n* The first one using [.tfvars\nfiles](https://developer.hashicorp.com/terraform/language/values/variables#variable-definitions-tfvars-files)\nfor all non-sensitive input, which should be stored within GitLab.\n\n\n![solution one to provide variables to\nTerraform](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097034/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097033419.png)\n\n\n* The second using [environment\nvariables](https://developer.hashicorp.com/terraform/language/values/variables#environment-variables)\nwith the prefix `TF_VAR`. That second way to inject variables, associated\nwith the GitLab capacity to [mask\nvariables](https://docs.gitlab.com/ee/ci/variables/#mask-a-cicd-variable),\n[protect\nthem](https://docs.gitlab.com/ee/ci/variables/#protect-a-cicd-variable), and\n[scope them to\nenvironments](https://docs.gitlab.com/ee/ci/environments/index.html#limit-the-environment-scope-of-a-cicd-variable)\nis a powerful solution to **prevent sensitive information leakages**. 
(If\nyou consider your production’s private CIDR very sensitive, you could\nprotect it like this, ensuring it is only available for the `production`\nenvironment, for pipelines running against protected branches and tags, and\nthat its value is masked in the job’s logs.)\n\n\n![solution two to provide variables to\nTerraform](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097034/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750097033422.png)\n\n\nAdditionally, each variable file should be controlled through a\n[`CODEOWNERS` file](https://docs.gitlab.com/ee/user/project/codeowners/) to\nset who can modify each of them.\n\n\n```\n\n[Production owners] \n\nvars/production.tfvars @operations-group\n\n\n[Staging owners]\n\nvars/staging.tfvars @odupre @operations-group\n\n\n[CodeOwners owners]\n\nCODEOWNERS @odupre\n\n```\n\n\nThis article is not a Terraform training, so we will go very fast and simply\nshow here the `vars/review.tfvars` file. Subsequent environment files are,\nof course, very similar. Just set the non-sensitive variables and their\nvalues here.\n\n\n```shell\n\naws_vpc_cidr = \"10.1.0.0/16\"\n\naws_public_subnet_cidr = \"10.1.1.0/24\"\n\naws_private_subnet_cidr = \"10.1.2.0/24\"\n\n```\n\n\n#### The child pipeline\n\n\nThis one is where the actual work is done. So, it is slightly more complex\nthan the first one. But there is no difficulty here that we cannot overcome\ntogether!\n\n\nAs we have seen in the definition of the [main\npipeline](#the-main-pipeline), that downstream pipeline is declared in the\nfile `.gitlab-ci/.first-layer.gitlab-ci.yml`.\n\n\n![Downstream pipeline declared in\nfile](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097033/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097033424.png)\n\n\nLet's break it down into small chunks. 
We'll see the big picture at the end.\n\n\n##### Run Terraform commands and secure the code\n\n\nFirst, we want to run a pipeline for Terraform. We, at GitLab, are open\nsource. So, our Terraform template is open source. And you simply need to\ninclude it. This can be achieved with the following snippet:\n\n\n```yml\n\ninclude:\n  - template: Terraform.gitlab-ci.yml\n```\n\n\nThis template runs for you the Terraform checks on the formatting and\nvalidates your code, before planning and applying it. It also allows you to\ndestroy what you have deployed.\n\n\nAnd, because GitLab is the a single, unified DevSecOps platform, we are also\nautomatically including two security scanners within that template to find\npotential threats in your code and warn you before you deploy it to the next\nenvironments.\n\n\nNow that we have checked, secured, built, and deployed our code, let's do\nsome tricks.\n\n\n##### Share cache between jobs\n\n\nWe will cache the job results to reuse them in subsequent pipeline jobs.\nThis is as simple as adding the following piece of code:\n\n\n```yml\n\ndefault:\n  cache:  # Use a shared cache or tagged runners to ensure terraform can run on apply and destroy\n    - key: cache-$CI_COMMIT_REF_SLUG\n      fallback_keys:\n        - cache-$CI_DEFAULT_BRANCH\n      paths:\n        - .\n```\n\n\nHere, we are defining a different cache for each commit, falling back to the\nmain branch name if needed.\n\n\nIf we look carefully at the templates that we are using, we can see that it\nhas some rules to control when jobs are run. We want to run all controls\n(both QA and security) on all branches. So, we are going to override these\nsettings.\n\n\n##### Run controls on all branches\n\n\nGitLab templates are a powerful feature where one can override only a piece\nof the template. Here, we are interested only in overwriting the rules of\nsome jobs to always run quality and security checks. 
Everything else defined\nfor these jobs will stay as defined in the template.\n\n\n```yml\n\nfmt:\n  rules:\n    - when: always\n\nvalidate:\n  rules:\n    - when: always\n\nkics-iac-sast:\n  rules:\n    - when: always\n\niac-sast:\n  rules:\n    - when: always\n```\n\n\nNow that we have enforced the quality and security controls, we want to\ndifferentiate how the main environments (integration and staging) in the\n[workflow](#the-workflow) and review environments behave. Let's start by\ndefining the main environment’s behavior, and we will tweak this\nconfiguration for the review environments.\n\n\n##### CD to integration and staging\n\n\nAs defined earlier, we want to deploy the main branch and the tags to these\ntwo environments. We are adding rules to control that on both the `build`\nand `deploy` jobs. Then, we want to enable `destroy` only for the\n`integration` as we have defined `staging` to be too critical to be deleted\nwith a single click. This is error-prone and we don't want to do that.\n\n\nFinally, we are linking the `deploy` job to the `destroy` one, so we can\n`stop` the environment directly from GitLab GUI.\n\n\nThe `GIT_STRATEGY` is here to prevent retrieving the code from the source\nbranch in the runner when destroying. This would fail if the branch has been\ndeleted manually, so we are relying on the cache to get everything we need\nto run the Terraform instructions.\n\n\n```yml\n\nbuild:  # terraform plan\n  environment:\n    name: $TF_STATE_NAME\n    action: prepare\n  rules:\n    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH\n    - if: $CI_COMMIT_TAG\n\ndeploy: # terraform apply --> automatically deploy on corresponding env\n(integration or staging) when merging to default branch or tagging. 
Second\nlayer environments (qa and production) will be controlled manually\n  environment: \n    name: $TF_STATE_NAME\n    action: start\n    on_stop: destroy\n  rules:\n    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH\n    - if: $CI_COMMIT_TAG\n\ndestroy:\n  extends: .terraform:destroy\n  variables:\n    GIT_STRATEGY: none\n  dependencies:\n    - build\n  environment:\n    name: $TF_STATE_NAME\n    action: stop\n  rules:\n    - if: $CI_COMMIT_TAG  # Do not destroy production\n      when: never\n    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $TF_DESTROY == \"true\" # Manually destroy integration env.\n      when: manual\n```\n\n\nAs said, this matches the need to deploy to `integration` and `staging`. But\nwe are still missing a temporary environment where the developers can\nexperience and validate their code without impacts on others. This is where\nthe deployment to the `review` environment takes place.\n\n\n##### CD to review environments\n\n\nDeploying to review environment is not too different than deploying to\n`integration` and `staging`. So we will once again leverage GitLab's\ncapacity to overwrite only pieces of job definition here.\n\n\nFirst, we set rules to run these jobs only on feature branches.\n\n\nThen, we link the `deploy_review` job to `destroy_review`. This will allow\nus to stop the environment **manually** from the GitLab user interface, but\nmore importantly, it will **automatically trigger the environment\ndestruction** when the feature branch is closed. 
This is a good FinOps\npractice to help you control your operational expenditures.\n\n\nSince Terraform needs a plan file to destroy an infrastructure, exactly like\nit needs one to build an infrastructure, then we are adding a dependency\nfrom `destroy_review` to `build_review`, to retrieve its artifacts.\n\n\nFinally, we see here that the environment's name is set to `$environment`.\nIt has been set in the [main pipeline](#the-main-pipeline) to\n`review/$CI_COMMIT_REF_SLUG`, and forwarded to this child pipeline with the\ninstruction `trigger:forward:yaml_variables:true`.\n\n\n```yml\n\nbuild_review:\n  extends: build\n  rules:\n    - if: $CI_COMMIT_TAG\n      when: never\n    - if: $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH\n      when: on_success\n\ndeploy_review:\n  extends: deploy\n  dependencies:\n    - build_review\n  environment:\n    name: $environment\n    action: start\n    on_stop: destroy_review\n    # url: https://$CI_ENVIRONMENT_SLUG.example.com\n  rules:\n    - if: $CI_COMMIT_TAG\n      when: never\n    - if: $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH\n      when: on_success\n\ndestroy_review:\n  extends: destroy\n  dependencies:\n    - build_review\n  environment:\n    name: $environment\n    action: stop\n  rules:\n    - if: $CI_COMMIT_TAG  # Do not destroy production\n      when: never\n    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH   # Do not destroy staging\n      when: never\n    - when: manual\n```\n\n\nSo, to recap, we now have a pipeline that can:\n\n\n* Deploy temporary review environments, which are automatically cleaned up\nwhen the feature branch is closed\n\n* Continuously deploy the **default branch** to `integration`\n\n* Continuously deploy the **tags** to `staging`\n\n\nLet's now add an extra layer, where we will deploy, based on a manual\ntrigger this time, to `qa` and `production` environments.\n\n\n##### Continuously deploy to QA and production\n\n\nBecause not everybody is willing to deploy continuously to production, we\nwill add 
a manual validation to the next two deployments. From a purely\n**CD** perspective, we would not add this trigger, but take this as an\nopportunity to learn how to run jobs from other triggers.\n\n\nSo far, we have started a [child pipeline](#the-child-pipeline) from the\n[main pipeline](#the-main-pipeline) to run all deployments.\n\n\nSince we want to run other deployments from the default branch and the tags,\nwe will add another layer dedicated to these additional steps. Nothing new\nhere. We will just repeat exactly the same process as the one we only did\nfor the [main pipeline](#the-main-pipeline). Going this way allows you to\nmanipulate as many layers as you need. I have already seen up to nine\nenvironments in some places.\n\n\nWithout arguing once again on the benefits to have fewer environments, the\nprocess that we are using here makes it very easy to implement the same\npipeline all the way from early stages to final delivery, while keeping your\npipeline definition simple and split in small chunks that you can maintain\nat no cost.\n\n\nTo prevent variable conflicts here, we are just using new var names to\nidentify the Terraform state and input file.\n\n\n```yml\n\n.2nd_layer:\n  stage: 2nd_layer\n  variables:\n    TF_ROOT: terraform\n  trigger:\n    include: .gitlab-ci/.second-layer.gitlab-ci.yml\n    # strategy: depend            # Do NOT wait for the downstream pipeline to finish to mark upstream pipeline as successful. 
Otherwise, all pipelines will fail when reaching the pipeline timeout before deployment to 2nd layer.\n    forward:\n      yaml_variables: true      # Forward variables defined in the trigger job\n      pipeline_variables: true  # Forward manual pipeline variables and scheduled pipeline variables\n\nqa:\n  extends: .2nd_layer\n  variables:\n    TF_STATE_NAME_2: qa\n    environment: $TF_STATE_NAME_2\n    TF_CLI_ARGS_plan_2: \"-var-file=../vars/$TF_STATE_NAME_2.tfvars\"\n  rules:\n    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH\n\nproduction:\n  extends: .2nd_layer\n  variables:\n    TF_STATE_NAME_2: production\n    environment: $TF_STATE_NAME_2\n    TF_CLI_ARGS_plan_2: \"-var-file=../vars/$TF_STATE_NAME_2.tfvars\"\n  rules:\n    - if: $CI_COMMIT_TAG\n```\n\n\n**One important trick here is the strategy used for the new downstream\npipeline.** We leave that `trigger:strategy` to its default value;\notherwise, the [main pipeline](#the-main-pipeline) would wait for your\n[grand-child pipeline](#the-grand-child-pipeline) to finish. With a manual\ntrigger, this could last for a very long time and make your pipeline\ndashboard harder to read and understand.\n\n\nYou have probably already wondered what is the content of that\n`.gitlab-ci/.second-layer.gitlab-ci.yml` file we are including here.  
We\nwill cover that in the next section.\n\n\n##### The first layer complete pipeline definition\n\n\nIf you are looking for a complete view of this first layer (stored in\n`.gitlab-ci/.first-layer.gitlab-ci.yml`), just expand the section below.\n\n\n```yml\n\nvariables:\n  TF_VAR_aws_ami_id: $AWS_AMI_ID\n  TF_VAR_aws_instance_type: $AWS_INSTANCE_TYPE\n  TF_VAR_aws_default_region: $AWS_DEFAULT_REGION\n\ninclude:\n  - template: Terraform.gitlab-ci.yml\n\ndefault:\n  cache:  # Use a shared cache or tagged runners to ensure terraform can run on apply and destroy\n    - key: cache-$CI_COMMIT_REF_SLUG\n      fallback_keys:\n        - cache-$CI_DEFAULT_BRANCH\n      paths:\n        - .\n\nstages:\n  - validate\n  - test\n  - build\n  - deploy\n  - cleanup\n  - 2nd_layer       # Use to deploy a 2nd environment on both the main branch and on the tags\n\nfmt:\n  rules:\n    - when: always\n\nvalidate:\n  rules:\n    - when: always\n\nkics-iac-sast:\n  rules:\n    - if: $SAST_DISABLED == 'true' || $SAST_DISABLED == '1'\n      when: never\n    - if: $SAST_EXCLUDED_ANALYZERS =~ /kics/\n      when: never\n    - when: on_success\n\niac-sast:\n  rules:\n    - if: $SAST_DISABLED == 'true' || $SAST_DISABLED == '1'\n      when: never\n    - if: $SAST_EXCLUDED_ANALYZERS =~ /kics/\n      when: never\n    - when: on_success\n\n###########################################################################################################\n\n## Integration env. and Staging. env\n\n##  * Auto-deploy to Integration on merge to main.\n\n##  * Auto-deploy to Staging on tag.\n\n##  * Integration can be manually destroyed if TF_DESTROY is set to true.\n\n##  * Destroy of next env. 
is not automated to prevent errors.\n\n###########################################################################################################\n\nbuild:  # terraform plan\n  environment:\n    name: $TF_STATE_NAME\n    action: prepare\n  rules:\n    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH\n    - if: $CI_COMMIT_TAG\n\ndeploy: # terraform apply --> automatically deploy on corresponding env\n(integration or staging) when merging to default branch or tagging. Second\nlayer environments (qa and production) will be controlled manually\n  environment: \n    name: $TF_STATE_NAME\n    action: start\n    on_stop: destroy\n  rules:\n    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH\n    - if: $CI_COMMIT_TAG\n\ndestroy:\n  extends: .terraform:destroy\n  variables:\n    GIT_STRATEGY: none\n  dependencies:\n    - build\n  environment:\n    name: $TF_STATE_NAME\n    action: stop\n  rules:\n    - if: $CI_COMMIT_TAG  # Do not destroy production\n      when: never\n    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $TF_DESTROY == \"true\" # Manually destroy integration env.\n      when: manual\n###########################################################################################################\n\n\n###########################################################################################################\n\n## Dev env.\n\n##  * Temporary environment. 
Lives and dies with the Merge Request.\n\n##  * Auto-deploy on push to feature branch.\n\n##  * Auto-destroy on when Merge Request is closed.\n\n###########################################################################################################\n\nbuild_review:\n  extends: build\n  rules:\n    - if: $CI_COMMIT_TAG\n      when: never\n    - if: $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH\n      when: on_success\n\ndeploy_review:\n  extends: deploy\n  dependencies:\n    - build_review\n  environment:\n    name: $environment\n    action: start\n    on_stop: destroy_review\n    # url: https://$CI_ENVIRONMENT_SLUG.example.com\n  rules:\n    - if: $CI_COMMIT_TAG\n      when: never\n    - if: $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH\n      when: on_success\n\ndestroy_review:\n  extends: destroy\n  dependencies:\n    - build_review\n  environment:\n    name: $environment\n    action: stop\n  rules:\n    - if: $CI_COMMIT_TAG  # Do not destroy production\n      when: never\n    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH   # Do not destroy staging\n      when: never\n    - when: manual\n###########################################################################################################\n\n\n###########################################################################################################\n\n## Second layer\n\n##  * Deploys from main branch to qa env.\n\n##  * Deploys from tag to production.\n\n###########################################################################################################\n\n.2nd_layer:\n  stage: 2nd_layer\n  variables:\n    TF_ROOT: terraform\n  trigger:\n    include: .gitlab-ci/.second-layer.gitlab-ci.yml\n    # strategy: depend            # Do NOT wait for the downstream pipeline to finish to mark upstream pipeline as successful. 
Otherwise, all pipelines will fail when reaching the pipeline timeout before deployment to 2nd layer.\n    forward:\n      yaml_variables: true      # Forward variables defined in the trigger job\n      pipeline_variables: true  # Forward manual pipeline variables and scheduled pipeline variables\n\nqa:\n  extends: .2nd_layer\n  variables:\n    TF_STATE_NAME_2: qa\n    environment: $TF_STATE_NAME_2\n    TF_CLI_ARGS_plan_2: \"-var-file=../vars/$TF_STATE_NAME_2.tfvars\"\n  rules:\n    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH\n\nproduction:\n  extends: .2nd_layer\n  variables:\n    TF_STATE_NAME_2: production\n    environment: $TF_STATE_NAME_2\n    TF_CLI_ARGS_plan_2: \"-var-file=../vars/$TF_STATE_NAME_2.tfvars\"\n  rules:\n    - if: $CI_COMMIT_TAG\n###########################################################################################################\n\n```\n\n\nAt this stage, we are already deploying safely to three environments. That\nis my personal ideal recommendation. However, if you need more environments,\nadd them to your CD pipeline.\n\n\nYou have certainly already noted that we include a downstream pipeline with\nthe keyword `trigger:include`. This includes the file\n`.gitlab-ci/.second-layer.gitlab-ci.yml`. We want to run almost the same\npipeline so obviously, its content is very similar to the one we have\ndetailed above. The main advantage here to define this [grand-child\npipeline](#the-grand-child-pipeline) is that it lives on its own, making\nboth variables and rules way easier to define.\n\n\n### The grand-child pipeline\n\n\nThis second layer pipeline is a brand new pipeline. Hence, it needs to mimic\nthe first layer definition with:\n\n\n* [Inclusion of the Terraform\ntemplate](#run-terraform-commands-and-secure-the-code).\n\n* [Enforcement of security checks](#run-controls-on-all-branches). 
Terraform\nvalidation would be duplicates of the first layer, but security scanners may\nfind threats that did not yet exist when scanners previously ran (for\nexample, if you deploy to production a couple of days after your deployment\nto staging).\n\n* [Overwrite build and deploy jobs to set specific\nrules](#cd-to-review-environments). Note that the `destroy` stage is no\nlonger automated to prevent too fast deletions.\n\n\nAs explained above, the `TF_STATE_NAME` and `TF_CLI_ARGS_plan` have been\nprovided from the [main pipeline](#the-main-pipeline) to the [child\npipeline](#the-child-pipeline). We needed another variable name to pass\nthese values from the [child pipeline](#the-child-pipeline) to here, the\n[grand-child pipeline](#the-grand-child-pipeline). This is why they are\npostfixed with `_2` in the child pipeline and the value is copied back to\nthe appropriate variable during the `before_script` here.\n\n\nSince we have already broken down each step above, we can zoom out here\ndirectly to the broad view of the global second layer definition (stored in\n`.gitlab-ci/.second-layer.gitlab-ci.yml`).\n\n\n```yml\n\n# Use to deploy a second environment on both the default branch and the\ntags.\n\n\ninclude:\n  template: Terraform.gitlab-ci.yml\n\nstages:\n  - validate\n  - test\n  - build\n  - deploy\n\nfmt:\n  rules:\n    - when: never\n\nvalidate:\n  rules:\n    - when: never\n\nkics-iac-sast:\n  rules:\n    - if: $SAST_DISABLED == 'true' || $SAST_DISABLED == '1'\n      when: never\n    - if: $SAST_EXCLUDED_ANALYZERS =~ /kics/\n      when: never\n    - when: always\n\n###########################################################################################################\n\n## QA env. and Prod. env\n\n##  * Manually trigger build and auto-deploy in QA\n\n##  * Manually trigger both build and deploy in Production\n\n##  * Destroy of these env. 
is not automated to prevent errors.\n\n###########################################################################################################\n\nbuild:  # terraform plan\n  cache:  # Use a shared cache or tagged runners to ensure terraform can run on apply and destroy\n    - key: $TF_STATE_NAME_2\n      fallback_keys:\n        - cache-$CI_DEFAULT_BRANCH\n      paths:\n        - .\n  environment:\n    name: $TF_STATE_NAME_2\n    action: prepare\n  before_script:  # Hack to set new variable values on the second layer, while still using the same variable names. Otherwise, due to variable precedence order, setting new value in the trigger job, does not cascade these new values to the downstream pipeline\n    - TF_STATE_NAME=$TF_STATE_NAME_2\n    - TF_CLI_ARGS_plan=$TF_CLI_ARGS_plan_2\n  rules:\n    - when: manual\n\ndeploy: # terraform apply\n  cache:  # Use a shared cache or tagged runners to ensure terraform can run on apply and destroy\n    - key: $TF_STATE_NAME_2\n      fallback_keys:\n        - cache-$CI_DEFAULT_BRANCH\n      paths:\n        - .\n  environment: \n    name: $TF_STATE_NAME_2\n    action: start\n  before_script:  # Hack to set new variable values on the second layer, while still using the same variable names. Otherwise, due to variable precedence order, setting new value in the trigger job, does not cascade these new values to the downstream pipeline\n    - TF_STATE_NAME=$TF_STATE_NAME_2\n    - TF_CLI_ARGS_plan=$TF_CLI_ARGS_plan_2\n  rules:\n    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH\n    - if: $CI_COMMIT_TAG && $TF_AUTO_DEPLOY == \"true\"\n    - if: $CI_COMMIT_TAG\n      when: manual\n###########################################################################################################\n\n```\n\n\nEt voilà. 
**We are ready to go.** Feel free to change the way you control\nyour job executions, leveraging for example GitLab's capacity to [delay a\njob](https://docs.gitlab.com/ee/ci/jobs/job_control.html#run-a-job-after-a-delay)\nbefore deploying to production.\n\n\n## Try it yourself\n\n\nWe finally reached our destination. We are now able to control **deployments\nto five different environments**, with only the **feature branches**, the\n**main branch**, and **tags**.\n\n* We are intensively reusing GitLab open source templates to ensure\nefficiency and security in our pipelines.\n\n* We are leveraging GitLab template capacities to overwrite only the blocks\nthat need custom control.\n\n* We have split the pipeline in small chunks, controlling the downstream\npipelines to match exactly what we need.\n\n\nFrom there, the floor is yours. You could, for example, easily update the\nmain pipeline to trigger downstream pipelines for your software source code,\nwith the\n[trigger:rules:changes](https://docs.gitlab.com/ee/ci/yaml/#ruleschanges)\nkeyword. And use another\n[template](https://gitlab.com/gitlab-org/gitlab/-/tree/master/lib/gitlab/ci/templates/)\ndepending on the changes that happened. 
But that is another story.\n",[109,1542,1543,9,719],{"slug":4147,"featured":6,"template":698},"using-child-pipelines-to-continuously-deploy-to-five-environments","content:en-us:blog:using-child-pipelines-to-continuously-deploy-to-five-environments.yml","Using Child Pipelines To Continuously Deploy To Five Environments","en-us/blog/using-child-pipelines-to-continuously-deploy-to-five-environments.yml","en-us/blog/using-child-pipelines-to-continuously-deploy-to-five-environments",{"_path":4153,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4154,"content":4160,"config":4166,"_id":4168,"_type":14,"title":4169,"_source":16,"_file":4170,"_stem":4171,"_extension":19},"/en-us/blog/what-the-digital-operational-resilience-act-means-for-banks",{"title":4155,"description":4156,"ogTitle":4155,"ogDescription":4156,"noIndex":6,"ogImage":4157,"ogUrl":4158,"ogSiteName":685,"ogType":686,"canonicalUrls":4158,"schema":4159},"What the Digital Operational Resilience Act means for banks","Find out why financial institutions need to understand the DORA legislative framework introduced in the European Union to strengthen operational resilience.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098149/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%284%29_3LZkiDjHLjhqEkvOvBsVKp_1750098149751.png","https://about.gitlab.com/blog/what-the-digital-operational-resilience-act-means-for-banks","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"What the Digital Operational Resilience Act means for banks\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Joshua Carroll\"},{\"@type\":\"Person\",\"name\":\"Allie Holland\"}],\n        \"datePublished\": \"2025-01-15\",\n      }",{"title":4155,"description":4156,"authors":4161,"heroImage":4157,"date":4163,"body":4164,"category":693,"tags":4165},[4162,2392],"Joshua Carroll","2025-01-15","Developers play a critical role in 
ensuring banks remain competitive and compliant. One framework gaining significant attention is DORA. If you’re thinking of the [DevOps Research and Assessment (DORA) metrics](https://docs.gitlab.com/ee/user/analytics/dora_metrics.html), this is something different. The [Digital Operational Resilience Act](https://www.eiopa.europa.eu/digital-operational-resilience-act-dora_en) is a new regulatory framework focused on safeguarding financial institutions against digital disruptions. For developers, understanding DORA regulations is not just a regulatory necessity; it’s an opportunity to drive innovation and enhance the overall stability of their organizations. \n\n## What is DORA regulation?\n\nThe Digital Operational Resilience Act (DORA) is a legislative framework introduced by the European Union to strengthen the operational resilience of financial institutions. DORA aims to ensure that banks and other financial services providers can withstand, respond to, and recover from all types of information and communication technology (ICT) related disruptions and threats. DORA outlines specific requirements for risk management, incident reporting, testing, and the overall governance of digital operations.\n\n## Core requirements of DORA\n\nDORA introduces several critical requirements for financial institutions to ensure they can maintain operational continuity, including:\n\n1. **Risk management:** Organizations must establish systems to identify, assess, and manage risks related to their digital operations. DORA fundamentally redefines the landscape of ICT risk management by placing accountability at the executive level. Detailed in [Article 5](https://www.digital-operational-resilience-act.com/Article_5.html), the management body of an organization is now entrusted with the ultimate responsibility for overseeing ICT risk management. This includes conducting regular risk assessments and implementing strategies to mitigate identified vulnerabilities.   \n\n2. 
**Regular testing:** Financial institutions are required to conduct systematic testing of their ICT systems to ensure they can handle potential disruptions effectively. This includes stress testing, scenario analysis, and recovery simulations to evaluate the resilience of their operations.  \n\n3. **Incident reporting:** Significant ICT-related incidents must be reported to regulators within specified timeframes. This requirement enhances oversight and allows regulators to coordinate responses across the financial sector, ensuring a unified approach to managing crises. The most recent [Regulatory Technical Standards](https://www.eba.europa.eu/sites/default/files/2023-12/ecc72f1c-c68a-4e64-97dd-47470117c3ae/JC%202023%2070%20-%20%20CP%20on%20draft%20RTS%20and%20ITS%20on%20major%20incident%20reporting%20under%20DORA.pdf) proposes time limits for reporting of the initial notification of four hours after classification and 24 hours after detection of the incident, 72 hours for reporting of the intermediate report, and one month for the reporting of the final report.   \n\n4. **Third-party risk management:** DORA also focuses on managing risks associated with outsourcing services to third-party providers. Organizations must ensure that their partners adhere to the same stringent standards, conducting due diligence and regular assessments of third-party performance. One of the biggest shifts for a bank is oftentimes centered around the establishment of exit strategies, detailed in [Article 28](https://www.digital-operational-resilience-act.com/Article_28.html).\n\nOrganizations need to prepare for scenarios where a third-party provider can no longer meet their operational needs or compliance obligations. This proactive approach ensures continuity and minimizes disruption in critical services. GitLab offers a distinct advantage in this area, as our platform is cloud-agnostic. 
This flexibility allows organizations to easily adapt their operations and transition between service providers as needed, simplifying the implementation of effective exit strategies.\n\nFor those who are interested in learning a bit more about the specifics listed above, the formal regulation documentation can be found [here](https://eur-lex.europa.eu/legal-content/EN/TXT/?uri=CELEX:32022R2554).  \n\n## Why DORA matters to developers\n\nDORA is important for developers to understand for the following reasons:\n\n1. **Enhanced security posture:** For developers, DORA emphasizes the importance of robust cybersecurity measures. As cyber threats continue to evolve, being part of an organization that prioritizes security means you’ll need to build applications with security in mind from the beginning, with a shift [security left mindset](https://www.youtube.com/watch?v=XnYstHObqlA). Compliance with DORA requires implementing best practices in secure coding, conducting regular vulnerability assessments, and ensuring that security controls are integrated into the software development lifecycle.  \n2. **Focus on resilience:** DORA requires banks to have clear strategies for operational resilience. Developers must now design systems that go beyond surface level functionality, building applications that can withstand failures and protect against disruptions. Having a clear understanding of DORA can guide you in architecting applications that can seamlessly handle disruptions, whether from a technical failure or an external threat.  \n3. **Collaboration and cross-functional teams:** Implementing DORA effectively requires a collaborative approach, which could pose a challenge in siloed banking structures. Developers will need to work closely with cybersecurity teams, risk management, and compliance officers.   \n4. **Agility in incident response:** DORA mandates that organizations report and respond to incidents efficiently. 
Developers must be equipped to quickly address vulnerabilities and deploy fixes.   \n5. **Continuous improvement culture:** DORA encourages a culture of continuous improvement and testing. This requires the adoption of practices like chaos engineering and regular stress testing of applications to ensure they can handle unexpected scenarios. Embracing these methodologies will not only help meet regulatory requirements but also improve the overall quality and reliability of the software that is built.\n\n## GitLab's role in DORA compliance\n\nGitLab is prepared to help financial institutions meet DORA’s stringent requirements. With [security built into the earliest stages of deployment pipelines](https://about.gitlab.com/topics/ci-cd/shift-left-devops/), GitLab is strategically positioned to equip organizations with software that is [Secure by Design](https://about.gitlab.com/blog/secure-by-design-principles-meet-devsecops-innovation-in-gitlab-17/). \n\n* **Robust risk management:** GitLab’s built-in tools enable organizations to identify, assess, and manage risk across their digital landscape. By utilizing features like [issue tracking](https://docs.gitlab.com/ee/user/project/issues/index.html) and [merge requests](https://docs.gitlab.com/ee/user/project/merge_requests/), teams can collaboratively manage and document risks throughout the software development lifecycle. GitLab provides several tools that enable organizations to manage these requirements effectively:  \n      - **Audit logs and compliance dashboards:** GitLab's [audit logs](https://docs.gitlab.com/ee/user/compliance/audit_events.html) capture all activities within the platform, giving financial institutions a full history of changes made to code, configurations, and infrastructure. These logs allow compliance teams to review user actions and detect irregularities that could pose risks. 
Additionally, GitLab’s [compliance dashboard](https://docs.gitlab.com/ee/user/compliance/compliance_center/compliance_standards_adherence_dashboard.html) provides real-time visibility into which projects comply with established policies, making it easier to manage large-scale governance.  \n      - **Custom compliance frameworks:** GitLab allows organizations to create [custom compliance frameworks](https://docs.gitlab.com/ee/user/group/compliance_frameworks.html#:~:text=You%20can%20create%20a%20compliance,on%20which%20it%20is%20applied.) that are tailored to an organization's regulatory requirements and geographical regions. These frameworks ensure consistent enforcement of security and operational standards, meeting DORA’s systematic risk management objectives.  \n\n* **Comprehensive application security testing:** Security vulnerabilities pose significant regulatory, financial, and reputational risks. GitLab addresses these challenges by building security testing directly into its CI/CD pipelines, ensuring vulnerabilities are detected and mitigated before deployment. This approach leverages multiple [testing methodologies](https://about.gitlab.com/stages-devops-lifecycle/secure/):\n    - [Static Application Security Testing (SAST)](https://docs.gitlab.com/ee/user/application_security/sast/): Analyzes source code for security vulnerabilities.\n    - [Dynamic Application Security Testing (DAST)](https://docs.gitlab.com/ee/user/application_security/dast/): Tests running applications for security weaknesses.\n    - [Secret Detection](https://docs.gitlab.com/ee/user/application_security/secret_detection/): Prevents sensitive information from being exposed in code.\n    - [Fuzz Testing](https://docs.gitlab.com/ee/user/application_security/coverage_fuzzing/): Identifies potential security issues by testing with random inputs.\n\n  GitLab’s security tools run automated tests that scan for vulnerabilities in code, containers, and third-party dependencies. 
These features help organizations meet the DORA requirement to continuously test IT systems, providing peace of mind that potential vulnerabilities are addressed before they become operational risks.\n\n  ![GitLab features for DORA requirements in EU](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098160/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098160209.png)\n\n* **Efficient incident reporting:** GitLab’s [project management capabilities](https://docs.gitlab.com/ee/topics/plan_and_track.html) enable teams to effectively log and track significant ICT-related incidents. This centralized documentation, combined with [continuous vulnerability scanning](https://docs.gitlab.com/ee/user/application_security/continuous_vulnerability_scanning/), facilitates timely reporting to regulators, enhances visibility, and supports compliance with DORA's incident reporting requirements.\n  [GitLab's incident management features](https://docs.gitlab.com/ee/operations/incident_management/incidents.html#:~:text=The%20incident%20summary%20can%20be,displays%20them%20below%20the%20summary.) streamline the workflow of remediation, making it easier for teams to identify, trace, and act on incidents as they arise.\n    - Incident management tools: GitLab includes built-in tools for managing incidents, serving as a centralized record for teams to report, assess, and mitigate issues effectively. Users can create incident records, assign ownership, and document the investigation and resolution process. This centralization not only streamlines incident management but also enables teams to trace back and determine accountability for each incident. 
By facilitating clear ownership and structured workflows, GitLab positions organizations to effectively meet DORA’s requirements for effective incident response plans.\n    - Real-time alerts and monitoring integrations: By integrating with monitoring tools such as [Prometheus](https://prometheus.io/) and Grafana, GitLab allows financial institutions to receive real-time alerts when issues arise. These alerts can trigger automated incident responses, helping teams address potential threats before they escalate, in line with DORA’s emphasis on quick reaction times.\n\n* **Third-party risk management:** GitLab enables organizations to work closely with third-party providers, ensuring they adhere to the same rigorous standards required by the industry. The platform provides both technical controls and governance features to manage third-party risks:\n    * Technical Controls\n       - [Dependency Scanning](https://docs.gitlab.com/ee/user/application_security/dependency_scanning/): Automatically detects vulnerabilities in third-party libraries and open-source components\n      - [Software Composition Analysis](https://about.gitlab.com/blog/reduce-supply-chain-risk-with-smarter-vulnerability-prioritization/): Provides detailed inventory and security status of all external dependencies\n      -  [Container Scanning](https://docs.gitlab.com/ee/user/application_security/container_scanning/): Identifies vulnerabilities in third-party container images   \n\n   * Governance Features\n      - [Policy Enforcement](https://docs.gitlab.com/ee/user/application_security/policies/): Automatically enforce security policies for external code and components\n      -  [Integration Controls](https://docs.gitlab.com/ee/api/integrations.html): GitLab's API-first approach ensures secure and monitored integration with external systems\n      -   [Audit Trails](https://docs.gitlab.com/ee/user/compliance/audit_events.html): Maintain comprehensive logs of all third-party component usage and 
changes\n\n  These capabilities help organizations meet DORA's requirements for third-party risk management while maintaining operational efficiency.\n\nThe EU’s DORA regulations present new challenges for financial institutions, requiring them to enhance their governance, cybersecurity, and resilience frameworks. GitLab offers powerful features that address the key pillars of DORA, from incident management to cybersecurity testing and third-party risk management. By integrating GitLab into operational processes, financial institutions can streamline their compliance efforts, reduce risks, and ensure that they meet regulatory requirements with greater efficiency. GitLab provides a solid foundation for organizations seeking to stay ahead of the evolving regulatory landscape while maintaining strong security and operational resilience.\n\n> #### [Reach out](https://about.gitlab.com/solutions/finance/) to learn more about how GitLab can help meet your regulatory challenges.\n\n## Read more\n\n- [GitLab supports banks in navigating regulatory challenges](https://about.gitlab.com/blog/gitlab-supports-banks-in-navigating-regulatory-challenges/)\n- [Meet regulatory standards with GitLab security and compliance](https://about.gitlab.com/blog/meet-regulatory-standards-with-gitlab/)\n- [How to ensure separation of duties and enforce compliance with GitLab](https://about.gitlab.com/blog/ensuring-compliance/)\n",[570,693,9,951],{"slug":4167,"featured":6,"template":698},"what-the-digital-operational-resilience-act-means-for-banks","content:en-us:blog:what-the-digital-operational-resilience-act-means-for-banks.yml","What The Digital Operational Resilience Act Means For 
Banks","en-us/blog/what-the-digital-operational-resilience-act-means-for-banks.yml","en-us/blog/what-the-digital-operational-resilience-act-means-for-banks",{"_path":4173,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4174,"content":4180,"config":4184,"_id":4186,"_type":14,"title":4187,"_source":16,"_file":4188,"_stem":4189,"_extension":19},"/en-us/blog/why-are-organizations-moving-to-a-unified-devsecops-platform",{"title":4175,"description":4176,"ogTitle":4175,"ogDescription":4176,"noIndex":6,"ogImage":4177,"ogUrl":4178,"ogSiteName":685,"ogType":686,"canonicalUrls":4178,"schema":4179},"Why are organizations moving to a unified DevSecOps platform?","Learn about GitLab's comprehensive, unified DevSecOps platform, which integrates tools, enhances security, and leverages AI for efficient software development.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097063/Blog/Hero%20Images/Blog/Hero%20Images/securitylifecycle-light_securitylifecycle-light.png_1750097063583.png","https://about.gitlab.com/blog/why-are-organizations-moving-to-a-unified-devsecops-platform","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Why are organizations moving to a unified DevSecOps platform?\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Itzik Gan Baruch\"}],\n        \"datePublished\": \"2025-06-02\",\n      }",{"title":4175,"description":4176,"authors":4181,"heroImage":4177,"date":799,"body":4182,"category":1277,"tags":4183},[3391],"In today’s modern software development landscape, many organizations are migrating to the cloud and adopting DevSecOps processes. However, this transition presents a significant challenge: a proliferation of tools and legacy systems not designed for modern development. To adapt these systems to DevSecOps, organizations must create integrations between multiple tools for task management, CI/CD, security, monitoring, and more. The result? 
Operational complexity, high maintenance costs, and disrupted collaboration between development and operations teams. Additionally, developers experience frustration as they constantly switch between different tools to complete a single development flow – from planning to production.\n\n![The complexity and operational costs of integrating multiple tools into a DevSecOps process](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097077/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097077287.jpg)\n\n\u003Ccenter>\u003Ci>How complex it can be to integrate multiple tools into a DevSecOps process\u003C/i>\u003C/center>\n\n\u003Cbr>\u003C/br>\n\nThe good news is that a solution exists: A comprehensive DevSecOps platform offering a unified approach to software development.\n\nThese platforms are built for organizations operating in cloud-based and DevSecOps environments, consolidating all software development stages — from code management, CI/CD processes, task management, and security to AI-driven automation — into a single platform. Centralizing all software development workflows in a unified interface enables development and operations teams to work more efficiently, streamline communication, and minimize operational complexities and disruptions.\n\nFurthermore, the developer experience significantly improves — engineers are much happier working with a product designed specifically for modern development needs.\n\nIn the sections below, we’ll explore how GitLab helps teams overcome common challenges — whether it’s managing projects and tasks, ensuring security and compliance, or adopting AI-powered development tools – all within a single, unified platform.\n\n## Integrated Agile project management\n\nGitLab provides a holistic solution in which project and task management are fully integrated across all stages of the software development lifecycle, such as CI/CD, enabling real-time tracking of development progress. 
Issues and epics directly link to automation processes, allowing a seamless flow from planning to production deployment. This approach enhances transparency across teams, reduces delays, and ensures that all stakeholders have a clear view of the development status in real-time.\n\n![Issues and epics directly link to automation processes, allowing a seamless flow from planning to production deployment.](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097077/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097077288.jpg)\n\n## Built-in security\nGitLab strongly emphasizes integrating security capabilities end-to-end (security first). The platform integrates a wide range of automated security scanners, including:\n\n- [Dependency Scanning](https://docs.gitlab.com/user/application_security/dependency_scanning/)\n- [Static Application Security Testing (SAST)](https://docs.gitlab.com/user/application_security/sast/)\n- [Dynamic Application Security Testing (DAST)](https://docs.gitlab.com/user/application_security/dast/)\n- [Secret Detection](https://docs.gitlab.com/user/application_security/secret_detection/)\n- [Container Scanning](https://docs.gitlab.com/user/application_security/container_scanning/)\n\n![Security scanning capabilities integrated into the CI/CD process at various development stages](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097077/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097077289.jpg)\n\n\u003Ccenter>\u003Ci>Security scanning capabilities integrated into the CI/CD process at various development stages\u003C/i>\u003C/center>\n\n\u003Cbr>\u003C/br>\n\nThese security checks are built directly into every phase of the software development lifecycle, including the CI/CD pipeline, to provide developers with immediate feedback on potential security issues early in the development cycle.\n\n## Compliance and regulatory requirements\n\nBeyond efficiency and user experience, many 
organizations — especially those in regulated industries such as financial institutions or large enterprises — must ensure their processes comply with strict security and compliance standards. They need the ability to enforce policies for different projects, such as mandating a security scanner every time a CI/CD pipeline runs on specific code branches (e.g., main or protected branches) or requiring specific approvals before merging code into the main branch.\n\nWith GitLab, this becomes easier through [Compliance Frameworks](https://about.gitlab.com/blog/introducing-custom-compliance-frameworks-in-gitlab/), a feature that allows organizations to define and enforce structured policies for selected projects. This ensures compliance with automatic regulatory and security requirements while maintaining a seamless and efficient developer workflow.\n\n## AI-powered development\n\n[GitLab Duo](https://about.gitlab.com/gitlab-duo/) provides AI-driven assistance across all development stages, eliminating the need to switch to external tools. Every AI-powered request is processed within the full context of the project and codebase, enabling smarter and more efficient work.\n\nAI can perform example tasks such as:\n- automatic task description generation\n- smart summarization of issue discussions, saving developers valuable time\n- advanced code review capabilities\n- code improvement and optimization suggestions\n- automated test generation\n- security vulnerability detection and remediation\n- troubleshooting root cause analysis for CI pipeline failures\n- privacy and Data Security\n\nUnderstanding the needs of regulated organizations, particularly in the public and financial sectors, GitLab offers a unique solution for running AI models in a secure environment. 
GitLab Duo Self-Hosted enables organizations to maintain full control over data privacy, security, and the deployment of large language models ([LLMs](https://about.gitlab.com/blog/what-is-a-large-language-model-llm/)) in their own infrastructure, ensuring:\n- data privacy protection\n- compliance with regulatory requirements\n- maximum security\n- AI benefits without external network dependencies or risks\n\n## Summary\n\nOrganizations need a comprehensive DevSecOps platform to streamline processes, enhance security, and accelerate innovation. GitLab delivers precisely that — a single application consolidating all essential development, security, and operational tools with built-in security integration and AI-powered automation.\n\nReady to see GitLab in action? Explore interactive demos of:\n\n- [GitLab Premium and Ultimate with Duo](https://gitlab.navattic.com/gitlab-premium-with-duo) – experience AI-powered development assistance\n\n- [Adding security to the CI/CD pipeline](https://gitlab.navattic.com/gitlab-scans) – see how integrated security scanning protects your software\n\n- [Compliance frameworks](https://gitlab.navattic.com/compliance) – discover how GitLab enforces policies across projects for better governance\n\n> Join the GitLab 18 virtual launch event to learn about the future of the DevSecOps platform, including the role of agentic AI. 
[Register today!](https://about.gitlab.com/eighteen/)",[951,9,717],{"slug":4185,"featured":6,"template":698},"why-are-organizations-moving-to-a-unified-devsecops-platform","content:en-us:blog:why-are-organizations-moving-to-a-unified-devsecops-platform.yml","Why Are Organizations Moving To A Unified Devsecops Platform","en-us/blog/why-are-organizations-moving-to-a-unified-devsecops-platform.yml","en-us/blog/why-are-organizations-moving-to-a-unified-devsecops-platform",{"_path":4191,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4192,"content":4198,"config":4203,"_id":4205,"_type":14,"title":4206,"_source":16,"_file":4207,"_stem":4208,"_extension":19},"/en-us/blog/why-hackerone-gets-love-letters-from-developers",{"title":4193,"description":4194,"ogTitle":4193,"ogDescription":4194,"noIndex":6,"ogImage":4195,"ogUrl":4196,"ogSiteName":685,"ogType":686,"canonicalUrls":4196,"schema":4197},"Why HackerOne gets love letters from its developers","Learn how HackerOne is making developers more productive while ensuring that security is built into their software development processes.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662439/Blog/Hero%20Images/codewithheart.png","https://about.gitlab.com/blog/why-hackerone-gets-love-letters-from-developers","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Why HackerOne gets love letters from its developers\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Sharon Gaudin\"}],\n        \"datePublished\": \"2023-11-14\",\n      }",{"title":4193,"description":4194,"authors":4199,"heroImage":4195,"date":4200,"body":4201,"category":1073,"tags":4202},[1498],"2023-11-14","At [HackerOne](/customers/hackerone/), a cybersecurity company, using the GitLab DevSecOps Platform has changed developers’ team experience and culture so much they’re basically sending the company love letters about their jobs.\n\nThis is no exaggeration, 
according to HackerOne Senior Director of Platform and Infrastructure Russell Owen, who spoke to an audience at GitLab’s DevSecOps World Tour in Mountain View, California, this fall. The 11-year-old company adopted GitLab in 2018 for source code and issues management, CI/CD, and security and compliance – features that didn’t exist in its previous tooling system. Since then, developers have become more productive — and happier.\n\nHackerOne isn’t alone here. According to GitLab’s [2023 Global DevSecOps Survey](https://about.gitlab.com/developer-survey/), 28% of the more than 5,000 respondents said using DevOps made their developers happier. Productivity, efficiency, and automation go a long way to making developers’ jobs easier and more enjoyable.\n\n## Measuring for DevSecOps impact\n\n“We run surveys every quarter. Are the developers happy? What do they find frustrating? Where's the friction?” Owen said in an on-stage interview with [Sherrod Patching](https://about.gitlab.com/company/team/#spatching), vice president of customer success management at GitLab. “We compare our metrics across the industry to see how we're doing.”\n\nAnd they’re doing well. “For instance, just making our [CI/CD pipelines](/blog/how-to-learn-ci-cd-fast/) as tight as possible makes people more effective. We've done a lot of work on optimizing that inside of GitLab,” Owen added. “And we get what count as love letters from our developers. People are so excited because the pipeline times have come down substantially, and it has a direct impact on the productivity of the team.”\n\nWhile Owen said he definitely wants his DevSecOps team members to be happy, he also wants productivity to be high. And he noted the importance of evaluating a variety of metrics — from happiness to the number of merge requests and releases — to enable teams to quantify their impact on the company. 
And those numbers are good, too.\n\nWith GitLab, merge requests per engineer are up by 50% over the last year, and they’ve cut their code release time by 50% over the last two years, according to Owen, who added that the number of quarterly releases to production jumped by 73% since two years ago.\n\n“A lot of that is from just shaving time off the CI/CD pipeline,” he said. “That’s efficiency. That’s productivity. It’s important because I need to be able to justify our work and expenses. These kinds of metrics make it very easy for me to say: ‘Investing in this area has a measurable return on investment.’”\n\n## Using GitLab for security and compliance\n\nWhen a business is known around the world for cybersecurity, assuring the safety and reliability of its own code has to be a priority. HackerOne uses GitLab to ensure that security is built into their software products and processes, according to Owen.\n\nHe explained their need, for example, to keep the number of people who have access to their production systems to a minimum. The fewer people who can touch the system, the fewer potential entry points to introduce vulnerabilities. The platform enables the team to build in guidelines mandating that any change needs to be reviewed and approved by the infrastructure team. So when teams make a change to the infrastructure, it’s only applied to the system once it’s been accepted.\n\n“Changes are being reviewed by people who have expertise in the area,” said Owen. “This really allows efficiency, but also maintains security. And it's all done inside GitLab.”\n\nHackerOne also relies on the GitLab platform to make sure they are staying in line with strict [industry compliance requirements](/blog/meet-regulatory-standards-with-gitlab/). 
Limiting the number of people who have access to the system is an example of that.\n\nOwen said they also use GitLab to [stay compliant](/blog/top-5-compliance-features-to-leverage-in-gitlab/) with FedRAMP, the Federal Risk and Authorization Management Program, a set of standards the U.S. federal government requires for moving sensitive information into the cloud.\n\n“We do more and more work with containers and FedRAMP requires that they be scanned before being moved into production,” he added. “You can’t just bring them into your infrastructure directly. We stage and scan everything in GitLab — all the containers, particularly third-party containers. Then, if they meet our criteria, we move them on so they’re eligible for production.”\n\nWith GitLab, HackerOne remains FedRAMP compliant for all of its federal customers.\n\n## Looking to an AI future\n\nWhen asked what he thought they’d be focused on over the next five years, Owen replied, “AI, AI, AI.”\n\nFor HackerOne, which brings in immense amounts of data, Owen said he’s looking to artificial intelligence to help them cull that flood of information to find patterns and pull out what will be useful to their customers.\n\n“Whatever business you're in, for a long time, there has been a lot of data,” Owen said. “And you can't help but wonder how you can use it to provide value to your customers, right? There has been a sea change in the last few years, from AI being something that was kind of theoretically interesting to something that is operationally useful. So if you're not doing AI... Well, I'm sure we’re all doing AI. You have to do it now. And the tools are just incredible.”\n\n_HackerOne’s Owen is a software developer, designer, and IT leader with more than 20 years of experience building advanced systems. 
He previously worked at Google as an engineering manager in the company’s privacy and security department, as well as at Research in Motion, where he was responsible for the Blackberry’s infrastructure design._\n\n_Read more GitLab customer stories on our [customers page](https://about.gitlab.com/customers/)._",[951,9,803,1075],{"slug":4204,"featured":91,"template":698},"why-hackerone-gets-love-letters-from-developers","content:en-us:blog:why-hackerone-gets-love-letters-from-developers.yml","Why Hackerone Gets Love Letters From Developers","en-us/blog/why-hackerone-gets-love-letters-from-developers.yml","en-us/blog/why-hackerone-gets-love-letters-from-developers",{"_path":4210,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4211,"content":4217,"config":4222,"_id":4224,"_type":14,"title":4225,"_source":16,"_file":4226,"_stem":4227,"_extension":19},"/en-us/blog/windows-2022-support-for-gitlab-saas-runners",{"title":4212,"description":4213,"ogTitle":4212,"ogDescription":4213,"noIndex":6,"ogImage":4214,"ogUrl":4215,"ogSiteName":685,"ogType":686,"canonicalUrls":4215,"schema":4216},"Windows 2022 support for GitLab SaaS runners now available","Along with this announcement comes the deprecation of Windows 2019 and the existing tags on Windows runners with GitLab 17.0.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098940/Blog/Hero%20Images/Blog/Hero%20Images/AdobeStock_674148848_4qGCRe0NRFou2mFHkNhv7m_1750098939992.jpg","https://about.gitlab.com/blog/windows-2022-support-for-gitlab-saas-runners","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Windows 2022 support for GitLab SaaS runners now available\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Gabriel Engel\"}],\n        \"datePublished\": \"2024-01-22\",\n      
}",{"title":4212,"description":4213,"authors":4218,"heroImage":4214,"date":4219,"body":4220,"category":1277,"tags":4221},[2510],"2024-01-22","We are now supporting Windows 2022 on our SaaS runners on Windows, which are\ncurrently in Beta. This is an important step in our plan to mature SaaS\nrunners to general availability.\n\n\nWith this update, we are also announcing the deprecation of Windows 2019 and\nthe existing tags on Windows runners with GitLab 17.0, as we aim to simplify\nthe tags across our portfolio of SaaS runners. The tags change follows our\nannouncement of [removing tags from our small SaaS runner on\nLinux](https://about.gitlab.com/blog/removing-tags-from-small-saas-runner-on-linux/).\n\n\n### Changes at a glance\n\n\n- Windows 2022 is available now.\n\n- We are changing the tags to `saas-windows-medium-amd64`.\n\n- With GitLab 17.0, we will deprecate Windows 2019 and with it the tags\n`shared-windows` and `windows-1809`.\n\n\n### How to migrate to Windows 2022\n\n\nTo migrate to using Windows 2022, update the tag in your `.gitlab-ci.yaml`\nfile to `saas-windows-medium-amd64` as such:\n\n\n```yaml\n\nwindows-2022:\n  stage: test\n  tags:\n  - saas-windows-medium-amd64\n  script:\n    - echo \"I'm running Windows 2022\"\n```\n\n\nThe job execution will look like this:\n\n\n![windows 2022 -\nmigrate](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098959/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750098959552.png)\n\n\nWith this release, we only support and update Windows 2022 for SaaS runners\non Windows. 
Users are not able to select a different image version.\n\n\nYou can see all updates to our pre-installed software components that ship\nwith our Windows 2022 image under [Supported Windows\nversions](https://docs.gitlab.com/ee/ci/runners/saas/windows_saas_runner.html#supported-windows-versions).\n\n\n### Action required until GitLab 17.0\n\n\nWith GitLab 17.0, jobs configured with any of the deprecated tags\n`shared-windows` or `windows-1809` will be stuck.\n\n\nAn example job configuration that will be affected:\n\n\n```yaml\n\ntest-invalid-tag:\n  stage: test\n  tags:\n  - shared-windows\n  - windows-1809\n  script:\n    - echo \"I'm affected and will be stuck after 17.0\"\n```\n\n\nThe stuck job execution will look like this:\n\n\n![windows 2022 support - stuck\njob](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750098960/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750098959552.png)\n\n\nTo ensure a smooth transition and avoid job disruptions, update the tag in\nyour `.gitlab-ci.yaml` file to `saas-windows-medium-amd64` in the next three\nmonths.\n\n\n## References:\n\n- [What are SaaS runners?](https://docs.gitlab.com/ee/ci/runners/)\n\n- [SaaS runners on Windows\ndocumentation](https://docs.gitlab.com/ee/ci/runners/saas/windows_saas_runner.html)\n\n- [Tags - '.gitlab-ci.yml' Keyword\nReference](https://docs.gitlab.com/ee/ci/yaml/#tags)\n",[9,717,1133],{"slug":4223,"featured":6,"template":698},"windows-2022-support-for-gitlab-saas-runners","content:en-us:blog:windows-2022-support-for-gitlab-saas-runners.yml","Windows 2022 Support For Gitlab Saas 
Runners","en-us/blog/windows-2022-support-for-gitlab-saas-runners.yml","en-us/blog/windows-2022-support-for-gitlab-saas-runners",{"_path":4229,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4230,"content":4236,"config":4241,"_id":4243,"_type":14,"title":4244,"_source":16,"_file":4245,"_stem":4246,"_extension":19},"/en-us/blog/write-terraform-plans-faster-with-gitlab-duo-code-suggestions",{"title":4231,"description":4232,"ogTitle":4231,"ogDescription":4232,"noIndex":6,"ogImage":4233,"ogUrl":4234,"ogSiteName":685,"ogType":686,"canonicalUrls":4234,"schema":4235},"Write Terraform plans faster with GitLab Duo Code Suggestions","Follow this tutorial to learn how to use AI-powered code creation to manage your infrastructure with Terraform.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749679194/Blog/Hero%20Images/duo-blog-post.png","https://about.gitlab.com/blog/write-terraform-plans-faster-with-gitlab-duo-code-suggestions","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"Write Terraform plans faster with GitLab Duo Code Suggestions\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Abubakar Siddiq Ango\"}],\n        \"datePublished\": \"2024-01-24\",\n      }",{"title":4231,"description":4232,"authors":4237,"heroImage":4233,"date":4238,"body":4239,"category":801,"tags":4240},[1538],"2024-01-24","[Terraform](https://www.terraform.io/) is an industry-standard for infrastructure orchestration. It can, however, be daunting and time-consuming to learn how to create infrastructure resources, especially when using Terraform providers you are unfamiliar with. 
That is where GitLab Duo Code Suggestions and AI-powered code creation comes in.\n\n[GitLab Duo Code Suggestions](https://about.gitlab.com/solutions/code-suggestions/) accelerates your coding in up to [15 supported programming languages](https://docs.gitlab.com/ee/user/project/repository/code_suggestions/index.html#supported-languages-in-ides), including infrastructure as code (IaC) using Terraform.\n\nBy combining IaC with Terraform, infrastructure teams can adopt new Terraform providers/modules quickly and move changes to production faster. The combination also reduces the onboarding time for new users of Terraform or developers who are new to a code base by maintaining the context of the code base and dependencies in its suggestions. Instead of spending hours reading through documentation, you only need to review the given suggestions and update as necessary.\n\nIn this post, you will learn how to set up GitLab Duo Code Suggestions for Terraform.\n\n## How to set up GitLab Duo Code Suggestions for Terraform\n\nFollow these 6 steps to get started:\n\n1. Install the GitLab extension for your IDE of choice (read about [supported IDE extensions](https://docs.gitlab.com/ee/user/project/repository/code_suggestions/index.html#supported-editor-extensions).\n2. Authenticate the GitLab extension with GitLab.com or your GitLab self-hosted instance.\n3. Enable Code Suggestions in your Profile settings under the Preferences > Preferences menu. Search for Code Suggestions, toggle the checkbox, and click Save.\n4. Confirm that Code Suggestions is also enabled in your GitLab extensions settings in the IDE.\n5. If you are using Visual Studio Code and Neovim, third-party extension support is needed to use Terraform with Code Suggestions. For VS Code, you can install the [official Terraform extension](https://marketplace.visualstudio.com/items?itemName=HashiCorp.terraform) from HashiCorp.\n6. 
To test and apply your Terraform plans, [install Terraform](https://developer.hashicorp.com/terraform/install?product_intent=terraform) in your terminal and any necessary provider SDKs.\n\n## Creating your plans\n\nOnce you have all the prerequisites in place, all you need to do next is create a Terraform file ending with `.tf` and prompt GitLab Duo with comments describing what you want. Suggestions will be displayed while maintaining context. Even when no prompt is provided, GitLab Duo will suggest other Terraform resources you might be interested in based on the current context in your plans.\n\nLet's see this in action with GitLab Duo helping with a Terraform plan for provisioning a load balancer with four instances and other necessary resources on the Google Cloud Platform.\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/saa2JJ57UaQ\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n## Get started with Code Suggestions and Terraform today\n\n[Try Code Suggestions](https://about.gitlab.com/solutions/code-suggestions/) today and accelerate your software development. Code Suggestions is also available for [self-managed GitLab](https://docs.gitlab.com/ee/user/project/repository/code_suggestions/self_managed.html) (needs to be enabled by an administrator). 
Code Suggestions uses best-in-class large language models, and non-public customer data is never used as training data.",[9,803,719],{"slug":4242,"featured":91,"template":698},"write-terraform-plans-faster-with-gitlab-duo-code-suggestions","content:en-us:blog:write-terraform-plans-faster-with-gitlab-duo-code-suggestions.yml","Write Terraform Plans Faster With Gitlab Duo Code Suggestions","en-us/blog/write-terraform-plans-faster-with-gitlab-duo-code-suggestions.yml","en-us/blog/write-terraform-plans-faster-with-gitlab-duo-code-suggestions",{"_path":4248,"_dir":4249,"_draft":6,"_partial":6,"_locale":7,"content":4250,"config":4255,"_id":4257,"_type":14,"title":4258,"_source":16,"_file":4259,"_stem":4260,"_extension":19},"/en-us/blog/external-url/gitlab-16-11-release","external-url",{"title":4251,"description":4252,"heroImage":4253,"date":2084,"category":1277,"tags":4254},"GitLab 16.11 Release","GitLab 16.11 released with GitLab Duo Chat general availability","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750099004/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%284%29_1od9f5DGEA0ntKLVnJbA2p_1750099004258.png",[781,9],{"featured":6,"externalUrl":4256},"https://about.gitlab.com/releases/2024/04/18/gitlab-16-11-released/","content:en-us:blog:external-url:gitlab-16-11-release.yml","Gitlab 16 11 Release","en-us/blog/external-url/gitlab-16-11-release.yml","en-us/blog/external-url/gitlab-16-11-release",{"_path":4262,"_dir":4249,"_draft":6,"_partial":6,"_locale":7,"content":4263,"config":4269,"_id":4271,"_type":14,"title":4272,"_source":16,"_file":4273,"_stem":4274,"_extension":19},"/en-us/blog/external-url/gitlab-16-9-release",{"title":4264,"description":4265,"heroImage":4266,"date":4267,"category":1277,"tags":4268},"GitLab 16.9 Release","16.9 features GitLab Duo Chat with wider Beta access, usability improvements to the CI/CD variables page, more options for auto-canceling pipelines, and 
more!","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749668947/Blog/Hero%20Images/16.9_cover_image_-_Blog-1800x800.png","2024-02-15",[781,9],{"featured":91,"externalUrl":4270},"https://about.gitlab.com/releases/2024/02/15/gitlab-16-9-released/","content:en-us:blog:external-url:gitlab-16-9-release.yml","Gitlab 16 9 Release","en-us/blog/external-url/gitlab-16-9-release.yml","en-us/blog/external-url/gitlab-16-9-release",{"_path":4276,"_dir":4249,"_draft":6,"_partial":6,"_locale":7,"content":4277,"config":4282,"_id":4284,"_type":14,"title":4285,"_source":16,"_file":4286,"_stem":4287,"_extension":19},"/en-us/blog/external-url/gitlab-17-1-release",{"title":4278,"description":4279,"heroImage":4280,"date":3215,"category":1277,"tags":4281},"GitLab 17.1 Release","GitLab 17.1 released with Model registry available in beta and multiple GitLab Duo Code Suggestions in VS Code.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749669047/Blog/Hero%20Images/product-gl17-blog-release-cover-17-1-0093-1800x945-fy25.png",[781,717,9],{"featured":91,"externalUrl":4283},"https://about.gitlab.com/releases/2024/06/20/gitlab-17-1-released/","content:en-us:blog:external-url:gitlab-17-1-release.yml","Gitlab 17 1 Release","en-us/blog/external-url/gitlab-17-1-release.yml","en-us/blog/external-url/gitlab-17-1-release",{"_path":4289,"_dir":4249,"_draft":6,"_partial":6,"_locale":7,"content":4290,"config":4296,"_id":4298,"_type":14,"title":4299,"_source":16,"_file":4300,"_stem":4301,"_extension":19},"/en-us/blog/external-url/gitlab-17-2-release",{"title":4291,"description":4292,"heroImage":4293,"date":4294,"category":717,"tags":4295},"GitLab 17.2 Release","GitLab 17.2 released with log streaming, a new pipeline execution security policy, and vulnerability explanations now generally 
available","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749667769/Blog/Hero%20Images/product-gl17-blog-release-cover-17-2-0093-1800x945-fy25.png","2024-07-18",[781,717,9],{"featured":91,"externalUrl":4297},"https://about.gitlab.com/releases/2024/07/18/gitlab-17-2-released/","content:en-us:blog:external-url:gitlab-17-2-release.yml","Gitlab 17 2 Release","en-us/blog/external-url/gitlab-17-2-release.yml","en-us/blog/external-url/gitlab-17-2-release",{"_path":4303,"_dir":4249,"_draft":6,"_partial":6,"_locale":7,"content":4304,"config":4310,"_id":4312,"_type":14,"title":4313,"_source":16,"_file":4314,"_stem":4315,"_extension":19},"/en-us/blog/external-url/gitlab-17-4-release",{"title":4305,"description":4306,"heroImage":4307,"date":4308,"category":717,"tags":4309},"GitLab 17.4 Release","GitLab 17.4 released with improved context in GitLab Duo","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749666419/Blog/Hero%20Images/product-gl17-blog-release-cover-17-4-0093-1800x945-fy25.png","2024-09-19",[781,717,9,1133],{"featured":6,"externalUrl":4311},"https://about.gitlab.com/releases/2024/09/19/gitlab-17-4-released/","content:en-us:blog:external-url:gitlab-17-4-release.yml","Gitlab 17 4 Release","en-us/blog/external-url/gitlab-17-4-release.yml","en-us/blog/external-url/gitlab-17-4-release",{"_path":4317,"_dir":4249,"_draft":6,"_partial":6,"_locale":7,"content":4318,"config":4324,"_id":4326,"_type":14,"title":4327,"_source":16,"_file":4328,"_stem":4329,"_extension":19},"/en-us/blog/external-url/gitlab-17-6-released-with-self-hosted-duo-chat-in-beta",{"title":4319,"description":4320,"heroImage":4321,"date":4322,"category":717,"tags":4323},"GitLab 17.6 released with self-hosted Duo Chat in beta","GitLab 17.6 released with self-hosted Duo Chat in beta, adherence checks for SAST and DAST security scanners, vulnerability report grouping, model registry and much 
more!","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662194/Blog/Hero%20Images/product-gl17-blog-release-cover-17-6-0093-1800x945-fy25.png","2024-11-21",[717,9,781],{"featured":91,"externalUrl":4325},"https://about.gitlab.com/releases/2024/11/21/gitlab-17-6-released/","content:en-us:blog:external-url:gitlab-17-6-released-with-self-hosted-duo-chat-in-beta.yml","Gitlab 17 6 Released With Self Hosted Duo Chat In Beta","en-us/blog/external-url/gitlab-17-6-released-with-self-hosted-duo-chat-in-beta.yml","en-us/blog/external-url/gitlab-17-6-released-with-self-hosted-duo-chat-in-beta",{"_path":4331,"_dir":4249,"_draft":6,"_partial":6,"_locale":7,"content":4332,"config":4338,"_id":4340,"_type":14,"title":4341,"_source":16,"_file":4342,"_stem":4343,"_extension":19},"/en-us/blog/external-url/gitlab-17-7-released",{"title":4333,"description":4334,"heroImage":4335,"date":4336,"category":717,"tags":4337},"GitLab 17.7 released","Release includes a new Planner user role, auto-resolution policy for vulnerabilities, admin-controlled instance integration allowlists, access token rotation in the UI and much more!","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662186/Blog/Hero%20Images/product-gl17-blog-release-cover-17-7-0093-1800x945-fy25.png","2024-12-19",[781,717,9],{"featured":91,"externalUrl":4339},"https://about.gitlab.com/releases/2024/12/19/gitlab-17-7-released/","content:en-us:blog:external-url:gitlab-17-7-released.yml","Gitlab 17 7 Released","en-us/blog/external-url/gitlab-17-7-released.yml","en-us/blog/external-url/gitlab-17-7-released",{"_path":4345,"_dir":4249,"_draft":6,"_partial":6,"_locale":7,"content":4346,"config":4354,"_id":4356,"_type":14,"title":4357,"_source":16,"_file":4358,"_stem":4359,"_extension":19},"/en-us/blog/external-url/gitlab-patch-release-17-0-1-16-11-3-16-10-6",{"title":4347,"description":4348,"heroImage":4349,"date":4350,"category":693,"tags":4351},"GitLab Patch Release: 17.0.1, 16.11.3, 16.10.6","Learn more about 
GitLab Patch Release: 17.0.1, 16.11.3, 16.10.6 for GitLab Community Edition (CE) and Enterprise Edition (EE).","https://res.cloudinary.com/about-gitlab-com/image/upload/v1749662877/Blog/Hero%20Images/security-cover-new.png","2024-05-22",[4352,4353,9],"security releases","patch releases",{"featured":6,"externalUrl":4355},"https://about.gitlab.com/releases/2024/05/22/patch-release-gitlab-17-0-1-released/","content:en-us:blog:external-url:gitlab-patch-release-17-0-1-16-11-3-16-10-6.yml","Gitlab Patch Release 17 0 1 16 11 3 16 10 6","en-us/blog/external-url/gitlab-patch-release-17-0-1-16-11-3-16-10-6.yml","en-us/blog/external-url/gitlab-patch-release-17-0-1-16-11-3-16-10-6",{"_path":4361,"_dir":4249,"_draft":6,"_partial":6,"_locale":7,"content":4362,"config":4367,"_id":4369,"_type":14,"title":4370,"_source":16,"_file":4371,"_stem":4372,"_extension":19},"/en-us/blog/external-url/gitlab-security-release-16-9-1-16-8-3-16-7-6",{"title":4363,"description":4364,"heroImage":4349,"date":4365,"category":693,"tags":4366},"GitLab Security Release: 16.9.1, 16.8.3, 16.7.6","Learn more about GitLab Security Release: 16.9.1, 16.8.3, 16.7.6 for GitLab Community Edition (CE) and Enterprise Edition (EE).","2024-02-21",[4352,693,9],{"featured":6,"externalUrl":4368},"https://about.gitlab.com/releases/2024/02/21/security-release-gitlab-16-9-1-released/","content:en-us:blog:external-url:gitlab-security-release-16-9-1-16-8-3-16-7-6.yml","Gitlab Security Release 16 9 1 16 8 3 16 7 
6","en-us/blog/external-url/gitlab-security-release-16-9-1-16-8-3-16-7-6.yml","en-us/blog/external-url/gitlab-security-release-16-9-1-16-8-3-16-7-6",{"_path":4374,"_dir":246,"_draft":6,"_partial":6,"_locale":7,"seo":4375,"content":4381,"config":4386,"_id":4388,"_type":14,"title":4389,"_source":16,"_file":4390,"_stem":4391,"_extension":19},"/en-us/blog/10-best-practices-for-using-ai-powered-gitlab-duo-chat",{"title":4376,"description":4377,"ogTitle":4376,"ogDescription":4377,"noIndex":6,"ogImage":4378,"ogUrl":4379,"ogSiteName":685,"ogType":686,"canonicalUrls":4379,"schema":4380},"10 best practices for using AI-powered GitLab Duo Chat","Explore tips and tricks for integrating GitLab Duo Chat into your AI-powered DevSecOps workflows. Plus, expert advice on how to refine chat prompts for the best results.","https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097639/Blog/Hero%20Images/Blog/Hero%20Images/blog-image-template-1800x945%20%281%29_77JeTV9gAmbXM0224acirV_1750097638765.png","https://about.gitlab.com/blog/10-best-practices-for-using-ai-powered-gitlab-duo-chat","\n                        {\n        \"@context\": \"https://schema.org\",\n        \"@type\": \"Article\",\n        \"headline\": \"10 best practices for using AI-powered GitLab Duo Chat\",\n        \"author\": [{\"@type\":\"Person\",\"name\":\"Michael Friedrich\"}],\n        \"datePublished\": \"2024-04-02\",\n      }",{"title":4376,"description":4377,"authors":4382,"heroImage":4378,"date":4383,"body":4384,"category":801,"tags":4385},[1379],"2024-04-02","Getting into a conversation with AI can be challenging. What question do you\nstart with? How do you frame the question? How much context is needed? 
Will\nthe conversation provide the best and most efficient results?\n\n\nIn this tutorial, we explore 10 tips and best practices to integrate GitLab\nDuo Chat into your AI-powered DevSecOps workflows and refine your prompts\nfor the best results.\n\n\n[Get started: Keep GitLab Duo Chat open and in\nsight](#get-started-keep-gitlab-duo-chat-open-and-in-sight)\n\n\n[10 best practices for using GitLab Duo\nChat](#10-best-practices-for-using-gitlab-duo-chat)\n\n\n1. [Have a conversation](#1.-have-a-conversation)\n\n2. [Refine the prompt for more\nefficiency](#2.-refine-the-prompt-for-more-efficiency)\n\n3. [Follow prompt patterns](#3.-follow-prompt-patterns)\n\n4. [Use low-context communication](#4.-use-low-context-communication)\n\n5. [Repeat yourself](#5.-repeat-yourself)\n\n6. [Be patient](#6.-be-patient)\n\n7. [Reset and start anew](#7.-reset-and-start-anew)\n\n8. [Gain efficiency with slash commands in the\nIDE](#8.-gain-efficiency-with-slash-commands-in-the-ide)\n\n9. [Refine the prompt for slash\ncommands](#9.-refine-the-prompt-for-slash-commands)\n\n10. [Get creative with slash\ncommands](#10.-get-creative-with-slash-commands)\n\n\nBonus content:\n\n- [Shortcuts](#shortcuts)\n\n- [Fun exercises](#fun-exercises)\n\n- [Learn more](#learn-more)\n\n\n> Live demo! Discover the future of AI-driven software development with our\nGitLab 17 virtual launch event. [Register\ntoday!](https://about.gitlab.com/seventeen/)\n\n\n## Get started: Keep GitLab Duo Chat open and in sight\n\n\n[GitLab Duo Chat](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html) is\navailable in the GitLab UI, Web IDE, and supported programming IDEs, for\nexample, VS Code. \n\n\nIn VS Code, you can open GitLab Duo Chat in the default left pane. You can\nalso drag and drop the icon into the right pane. This allows you to keep\nChat open while you write code and navigate the file tree, perform Git\nactions, etc. 
To reset the Chat location, open the command palette (by\npressing the `Command+Shift+P` (on macOS) or `Ctrl+Shift+P` (on\nWindows/Linux) keyboard shortcut and then type `View: Reset View Locations`.\nThe following short video shows you how to do it.\n\n\n\u003C!-- blank line -->\n\n\u003Cfigure class=\"video_container\">\n  \u003Ciframe src=\"https://www.youtube.com/embed/foZpUvWPRJQ\" frameborder=\"0\" allowfullscreen=\"true\"> \u003C/iframe>\n\u003C/figure>\n\n\u003C!-- blank line -->\n\n\nThe Web IDE and VS Code share the same framework – the same method works in\nthe Web IDE for more efficient workflows.\n\n\n![Chat in Web\nIDE](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image1_aHR0cHM6_1750097645344.png)\n\n\n## 10 best practices for using GitLab Duo Chat\n\n\n### 1. Have a conversation\n\n\nChats are conversations, not search forms.\n\n\nFor the first conversation icebreaker, you can start with the same search\nterms similar to a browser search and experiment with the response and\noutput. In this example, let's start with a C# project and best practices. \n\n\n> c# start project best practices\n\n\n![Chat prompt for C# start project best practices and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097646/Blog/Content%20Images/Blog/Content%20Images/image11_aHR0cHM6_1750097645345.png)\n\n\nThe response is helpful to understand a broad scope of C#, but does not\nkickstart immediate best practices. Let's follow up with a more focused\nquestion in the same context. \n\n\n> Please show the project structure for the C# project.\n\n\n![Chat prompt for project structure for the C# project and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image9_aHR0cHM6_1750097645346.png)\n\n\nThis answer is helpful. 
Next, let's follow up with a Git question, and use\nthe same question structure: Direct request to show something.\n\n\n> Show an example for a .gitignore for C#\n\n\n![Chat prompt for a .gitignore for C# and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image25_aHR0cHM6_1750097645347.png)\n\n\nContinue with CI/CD and ask how to build the C# project.\n\n\n> Show a GitLab CI/CD configuration for building the C# project\n\n\n![Chat prompt for GitLab CI/CD configuration for building C# project and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image16_aHR0cHM6_1750097645349.png)\n\n\nIn this example, Chat encouraged us to request specific changes. Let's ask\nto use the .NET SDK 8.0 instead of 6.0. \n\n\n> In the above example, please use the .NET SDK 8.0 image\n\n\n![Chat prompt to use .NET SDK 8.0 image and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image32_aHR0cHM6_1750097645350.png)\n\n\nThe CI/CD configuration uses the .NET command line interface (CLI). Maybe we\ncan use that for more efficient commands to create the projects and tests\nstructure, too? \n\n\n> Explain how to create projects and test structure on the CLI \n\n\n![Chat prompt to explain how to create projects and test structure on the\nCLI and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image14_aHR0cHM6_1750097645351.png)\n\n\nOf course, we could execute these commands in the terminal, but what if we\nwanted to stay in VS Code? 
Let's ask Chat.\n\n\n> Explain how to open a new terminal in VS Code\n\n\n![Chat prompt to explain how to open a new terminal in VS Code and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image5_aHR0cHM6_1750097645351.png)\n\n\n### 2. Refine the prompt for more efficiency\n\n\nThink of GitLab Duo Chat as a human, and engage with full sentences that\nprovide as much context into your thoughts and questions. \n\n\nExperienced browser search users might know this approach to queries: Build\nup the question, add more terms to refine the scope, and restart the search\nafter opening plenty of tabs. \n\n\nIn a browser search, this probably would result in four to five different\nsearch windows. \n\n\n```markdown\n\nc# start project best practices\n\nc# .gitignore\n\nc# gitlab cicd \n\nc# gitlab security scanning \n\nc# solutions and projects, application and tests\n\n``` \n\n\nYou can follow this strategy in a chat conversation, too. It requires adding\nmore context, making it a conversational approach. GitLab Duo Chat enables\nyou to ask multiple questions in one conversation request. Example: You need\nto start with a new C# project, apply best practices, add a `.gitignore`\nfile, and configure CI/CD and security scanning, just like in the above\nsearch. In Chat, you can combine the questions into one request.\n\n\n> How can I get started creating an empty C# console application in VS Code?\nPlease show a .gitignore and .gitlab-ci.yml configuration with steps for C#,\nand add security scanning for GitLab. 
Explain how solutions and projects in\nC# work, and how to add a test project on the CLI.\n\n\n![Chat prompt adding more context and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image37_aHR0cHM6_1750097645352.png)\n\n\nIn this response, Chat suggests to ask for specific configuration examples\nin follow-up questions in the conversation. Async practice: Create follow-up\nquestions. You can omit `C#` as context in the same chat session.\n\n\n> Please show an example for a .gitignore. Please show a CI/CD\nconfiguration. Include the SAST template.\n\n\n### 3. Follow prompt patterns \n\n\nFollow the pattern: `Problem statement, ask for help, provide additional\nrequests`. Not everything comes to mind when asking the first question –\ndon't feel blocked, and instead start with `Problem statement, ask for help`\nin the first iteration. \n\n\n> I need to fulfill compliance requirements. How can I get started with\nCodeowners and approval rules?\n\n\n![Chat prompt to get started with Codeowners and approval rules and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image19_aHR0cHM6_1750097645352.png)\n\n\nThe answer is helpful but obviously generic. Now, you may want to get\nspecific help for your team setup. \n\n\n> Please show an example for Codeowners with different teams: backend,\nfrontend, release managers.\n\n\n![Chat prompt to show an example for Codeowners with different teams:\nbackend, frontend, release managers and reponse\n](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image31_aHR0cHM6_1750097645353.png)\n\n\nAn alternative is to describe the situation you are in and to ask for input.\nIt can feel a bit like a conversation to follow the STAR model (Situation,\nTask, Action, Results). \n\n\n> I have a Kubernetes cluster integrated in GitLab. 
Please generate a Yaml\nconfiguration for a Kubernetes service deployment. Explain how GitOps works\nas a second step. How to verify the results?\n\n\n![Chat prompt with multiple questions and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image27_aHR0cHM6_1750097645354.png)\n\n\n### 4. Use low-context communication \n\n\nProvide as much context as needed to provide an answer. Sometimes, the\nprevious history or opened source code does not provide that helpful\ncontext. To make questions more efficient, apply a pattern of [low-context\ncommunication](https://handbook.gitlab.com/handbook/company/culture/all-remote/effective-communication/#understanding-low-context-communication),\nwhich is used in all-remote communication at GitLab.\n\n\nThe following question did not provide enough context in a C++ project.\n\n\n> Should I use virtual override instead of just override?\n\n\n![Chat prompt asking if the users should use virtual override instead of\njust override and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image34_aHR0cHM6_1750097645354.png)\n\n\nInstead, try to add more context:\n\n\n> When implementing a pure virtual function in an inherited class, should I\nuse virtual function override, or just function override? Context is C++. \n\n\n![Chat prompt with more detail and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image36_aHR0cHM6_1750097645355.png)\n\n\nThe example is also shown in the [GitLab Duo Coffee Chat: Refactor C++\nfunctions into OOP classes for abstract database\nhandling](https://youtu.be/Z9EJh0J9358?t=2190). \n\n\n### 5. Repeat yourself\n\n\nAI is not predictable. Sometimes, it may not answer with the expected\nresults, or does not produce source code examples or configuration snippets\nbecause it lacked context. 
It is recommended to repeat the question and\nrefine the requirements.\n\n\nIn the following example, we want to create a C# application. In the first\nattempt, we did not specify the application type – C# can be used to create\nconsole/terminal but also UI applications. The result also does not provide\nan empty example source code. The second, repeated prompt adds two more\nwords - `console` and `empty`. \n\n\n> How can I get started creating an C# application in VSCode?\n\n> \n\n> How can I get started creating an empty C# console application in VSCode?\n\n\nThe results in the prompt differ. The first response is helpful to get\nstarted by following the instructions in the VS Code window, but it does not\ntell us where the source code is located and how to modify it. The repeated\nprompt with refinements modifies the response and provides instructions how\nto override the default template with some “hello world” code.\n\n\n![Chat prompt with repeated prompt with modifications and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image28_aHR0cHM6_1750097645355.png)\n\n\nYou can also combine repeat and refine strategies, and ask Chat to show an\nexample for application code and tests.\n\n\n> How can I get started creating an empty C# console application in VSCode?\nPlease show an example for application and tests.\n\n\n![Chat prompt that asks for example for application and tests and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image3_aHR0cHM6_1750097645356.png)\n\n\n#### Repeat yourself after generic questions \n\n\nWhen asking generic technology questions, GitLab Duo Chat might not be able\nto help. In the following scenario, I wanted to get a suggestion for Java\nbuild tools and framework, and it did not work. 
There could be many answers:\nMaven, Gradle, etc., as build tools, and [100+ Java\nframeworks](https://en.wikipedia.org/wiki/List_of_Java_frameworks),\ndepending on the technology stack and requirements.\n\n\n![Chat prompt for Java build tools and framework and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image2_aHR0cHM6_1750097645356.png)\n\n\nLet's assume that we want to focus on a customer environment with [Java\nSpring Boot](https://spring.io/projects/spring-boot). \n\n\n> I want to create a Java Spring Boot application. Please explain the\nproject structure and show a hello world example.\n\n\n![Chat prompt that asks for more, including a hello world example and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image26_aHR0cHM6_1750097645357.png)\n\n\nThis provides great results already. As an async exercise, repeat the\nprompt, and ask how to deploy the application, adding more refinements in\neach step. Alternatively, you can make it a follow-up conversation.\n\n\n> I want to create a Java Spring Boot application. Please explain the\nproject structure and show a hello world example. Show how to build and\ndeploy the application in CI/CD.\n\n> \n\n> I want to create a Java Spring Boot application. Please explain the\nproject structure and show a hello world example. Show how to build and\ndeploy the application in CI/CD, using container images.\n\n> \n\n> I want to create a Java Spring Boot application. Please explain the\nproject structure and show a hello world example. Show how to build and\ndeploy the application in CI/CD, using container images. Use Kubernetes and\nGitOps in GitLab.\n\n\n### 6. 
Be patient\n\n\nSingle words or short sentences might not generate the desired results, [as\nshown in this video example](https://youtu.be/JketELxLNEw?t=1220).\nSometimes, GitLab Duo Chat is able to guess from available data, but\nsometimes also might insist on providing more context.\n\n\nExample: `labels` matches the GitLab documentation content.\n\n\n![Chat prompt about labels and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image12_aHR0cHM6_1750097645357.png)\n\n\nRefine the question to problem statements and more refinements for issue\nboard usage.\n\n\n> Explain labels in GitLab. Provide an example for efficient usage with\nissue boards.\n\n\n![Chat prompt that includes asking for an example and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image21_aHR0cHM6_1750097645358.png)\n\n\nOr use a problem statement, followed by a question and the ask for\nadditional examples.\n\n\n> I don't know how to use labels in GitLab. Please provide examples, and how\nto use them for filters in different views. Explain these views with\nexamples.\n\n\n![Chat prompt with problem statement and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image10_aHR0cHM6_1750097645358.png)\n\n\nAlso, avoid `yes/no` questions and instead add specific context.\n\n\n> Can you help me fix performance regressions?\n\n\n![Chat promptt that asks for help with fixing performance regressions and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image18_aHR0cHM6_1750097645359.png)\n\n\nInstead, provide the context of the performance regression, including the\nprogramming languages, frameworks, technology stack, and environments. 
The\nfollowing example uses an environment from some years ago, which can still\nbe accurate today.\n\n\n> My PHP application encounters performance regressions using PHP 5.6 and\nMySQL 5.5. Please explain potential root causes, and how to address them.\nThe app is deployed on Linux VMs.\n\n\n![Chat prompt that includes more detail and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image24_aHR0cHM6_1750097645360.png)\n\n\n### 7. Reset and start anew\n\n\nSometimes, the chat history shows a different learning curve and provides\nthe wrong context for follow-up questions. Or, you asked specific questions\nwhere GitLab Duo Chat cannot provide answers. Since generative AI is not\npredictable, it might also lack the ability to provide certain examples, but\nthink it gave them in a future response (observed in Chat Beta). The\nunderlying large language models, or LLMs, sometimes might insist on giving\na specific response, in an endless loop.\n\n\n> How can I get started creating an empty C# console application in VSCode?\nPlease show a .gitignore and .gitlab-ci.yml configuration with steps for C#,\nand add security scanning for GitLab. Explain how solutions and projects in\nC# work, and how to add a test project on the CLI.\n\n\nAfter asking the question above with an example configuration, I wanted to\nreduce the scope of the question to get a more tailored response. 
It did not\nwork as expected, since Chat knows about the chat history in context, and\nrefers to previous answers.\n\n\n> How can I get started creating an empty C# console application in VSCode?\nPlease show a .gitignore and .gitlab-ci.yml configuration with steps for C#.\n\n\n![Chat prompt that asks for configuration examples and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image23_aHR0cHM6_1750097645360.png)\n\n\nTo force Chat into a new context, use `/reset` as slash command to reset the\nsession, and repeat the question to get better results. You can also use\n`/clean` or `/clear` to delete all messages in the conversation.\n\n\n### 8. Gain efficiency with slash commands in the IDE \n\n\n#### Explain code\n\n\n- Q: Generated code? Existing code? Legacy code?\n\n- A: Use the [`/explain` slash command in the\nIDE](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html#explain-code-in-the-ide).\n\n- A2: Refine the prompt with more focused responses, for example: `/explain\nfocus on potential shortcomings or bugs`. \n\n\n![Chat prompt with /explain slash\ncommand](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/gitlab_duo_chat_slash_commands_explain_01_aHR0cHM6_1750097645361.png)\n\n\n![Chat prompt with refined\nprompt](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image6_aHR0cHM6_1750097645361.png)\n\n\n#### Refactor code \n\n\n- Q: Unreadable code? Long spaghetti code? Zero test coverage?\n\n- A: Use the [`/refactor` slash command in the\nIDE](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html#refactor-code-in-the-ide). \n\n- A2: Refine the prompt for more targeted actions, for example\nobject-oriented patterns: `/refactor into object-oriented classes with\nmethods and attributes`. 
\n\n\n![Chat prompt with /refactor slash\ncommand](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image35_aHR0cHM6_1750097645362.png)\n\n\n![Chat prompt with refined\nprompt](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image30_aHR0cHM6_1750097645362.png)\n\n\n#### Generate tests\n\n\n- Q: Testable code but writing tests takes too much time?\n\n- A: Use the [`/tests` slash command in the\nIDE](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html#write-tests-in-the-ide).\n\n- A2: Refine the prompt for specific test frameworks, or test targets. You\ncan also instruct the prompt to focus on refactoring, and then generate\ntests: `/tests focus on refactoring the code into functions, and generate\ntests`.\n\n\n![Chat prompt with /tests slash\ncommand](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image29_aHR0cHM6_1750097645363.png)\n\n\n![Chat prompt with refined\nprompt](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image4_aHR0cHM6_1750097645363.png)\n\n\nMore practical examples in complete development workflows are available in\nthe [GitLab Duo\nexamples](https://docs.gitlab.com/ee/user/gitlab_duo_examples.html)\ndocumentation.\n\n\n### 9. Refine the prompt for slash commands \n\n\nYou will see refined prompts tips in this blog post a lot. It is one of the\ningredients for better AI-powered workflow efficiency. Slash commands are no\ndifferent, and allow for better results in GitLab Duo Chat.\n\n\nA customer recently asked: \"Can code explanations using `/explain` create\ncomments in code?\" The answer is: no. But you can use the Chat prompt to ask\nfollow-up questions, and ask for a summary in a code comment format. It\nrequires the context of the language. 
\n\n\nThe following example with a [C++ HTTP client code using the curl\nlibrary](https://gitlab.com/gitlab-da/use-cases/ai/ai-workflows/gitlab-duo-prompts/-/blob/5cc9bdd65ee8ee16c548bea0402c18f8209d4d06/chat/slash-commands/c++/cli.cpp)\nneeds more documentation. You can refine the `/explain` prompt by giving\nmore refined instructions to explain the code by adding code comments, and\nthen copy-paste that into the editor.\n\n\n> /explain add documentation, rewrite the code snippet\n\n\n![Chat prompt to add documentation and rewrite code snippet and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image13_aHR0cHM6_1750097645363.png)\n\n\nAlternatively, you can ask Chat to `/refactor` the source code, and generate\nmissing code comments through a refined prompt.\n\n\n> /refactor add code comments and documentation\n\n\n![Chat prompt to refactor source code and generate code\ncomments](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image15_aHR0cHM6_1750097645364.png)\n\n\n### 10. Get creative with slash commands\n\n\nWhen the Chat prompt does not know an answer to a question about the source\ncode or programming language, look into the slash commands `/explain`,\n`/refactor`, and `/tests` and how much they can help in the context.\n\n\nIn the following example, an SQL query string in C++ is created in a single\nline. To increase readability, and also add more database columns in the\nfuture, it can be helpful to change the formatting into a multi-line string.\n\n\n> std::string sql = \"CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY\nKEY AUTOINCREMENT, name TEXT NOT NULL, email TEXT NOT NULL)\";\n\n\nYou can ask GitLab Duo Chat about it, for example, with the following\nquestion:\n\n\n> How to create a string in C++ using multiple lines?\n\n\nChat may answer with an explanation and optional, a source code example. 
In\nthis context, it can interpret the question to create a C++ string value\nwith multiple lines, for example, using the `\\n` character, assigned to a\nvariable. \n\n\nThe requirement instead is to only format the written code, and variable\nvalue assignment in multiple lines. The string value itself does not need to\ncontain a multi-line string representation. \n\n\nThere is an alternative for additional context in VS Code and the Web IDE:\nSelect the source code in question, right-click, and navigate into `GitLab\nDuo Chat > Refactor`. This opens the Chat prompt and fires the `/refactor`\ncode task immediately.\n\n\nAlthough, the code task might not bring the expected results. Refactoring a\nsingle-line SQL string can mean a lot of things: Use multiple lines for\nreadability, create constants, etc.\n\n\nCode tasks provide an option to refine the prompt. You can add more text\nafter the `/refactor` command, and instruct GitLab Duo Chat to use a\nspecific code type, algorithm, or design pattern. \n\n\nLet's try it again: Select the source code, change focus into Chat, and type\nthe following prompt, followed by `Enter`. \n\n\n> /refactor into a multi-line written string. Show different approaches for\nall C++ standards.\n\n\n![Chat prompt to refactor into a multi-line written string and\nresponse](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image17_aHR0cHM6_1750097645364.png)\n\n\n**Tip:** You can use GitLab Duo Code Suggestions to refine the source code\neven more after refactoring, or use alternative `/refactor` prompt\nrefinements.\n\n\n>/refactor into a multi-line written string, show different approaches\n\n>\n\n> /refactor into multi-line string, not using raw string literals\n\n>\n\n> /refactor into a multi-line written string. 
Make the table name\nparametrizable\n\n\nAn alternative approach with the `stringstream` type is shown in the [GitLab\nDuo Coffee Chat: Refactor C++ functions into OOP classes for abstract\ndatabase handling](https://www.youtube.com/watch?v=Z9EJh0J9358), [MR\ndiff](https://gitlab.com/gitlab-da/use-cases/ai/gitlab-duo-coffee-chat/gitlab-duo-coffee-chat-2024-01-23/-/commit/7ea233138aed46d77e6ce0d930dd8e10560134eb#4ce01e4c84d4b62df8eed159c2db3768ad4ef8bf_33_35). \n\n\n#### Explain vulnerabilities\n\n\nIt might not always work, but the `/explain` slash command can be asked\nabout security vulnerability explanations, too. In this example, the [C\ncode](https://gitlab.com/gitlab-da/use-cases/ai/ai-workflows/gitlab-duo-prompts/-/blob/5a5f293dfbfac7222ca4013d8f9ce9b462e4cd3a/chat/slash-commands/c/vuln.c)\ncontains multiple vulnerabilities for strcpy() buffer overflows, world\nwritable file permissions, race condition attacks, and more.\n\n\n> /explain why this code has multiple vulnerabilities\n\n\n![Chat prompt about the code's multiple\nvulnerabilities](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image20_aHR0cHM6_1750097645365.png)\n\n\n#### Refactor C code into Rust\n\n\nRust provides memory safety. You can ask Duo Chat to refactor the vulnerable\n[C\ncode](https://gitlab.com/gitlab-da/use-cases/ai/ai-workflows/gitlab-duo-prompts/-/blob/5a5f293dfbfac7222ca4013d8f9ce9b462e4cd3a/chat/slash-commands/c/vuln.c)\ninto Rust, using `/refactor into Rust`. Practice with more refined prompts\nto get better results.\n\n\n> /refactor into Rust and use high level libraries\n\n\n![Chat\nprompt](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image8_aHR0cHM6_1750097645366.png)\n\n\n### Shortcuts \n\n\nGive these shortcuts a try in your environment, and practice async using\nGitLab Duo Chat.\n\n\n1. 
Inspect vulnerable code from CVEs, and ask what it does, and how to fix\nit, using `/explain why is this code vulnerable`. \n\n**Tip:** Import open-source projects in GitLab to take advantage of GitLab\nDuo Chat code explanations.\n\n1. Try to refactor code into new programming languages to help legacy code\nmigration plans.\n\n1. You can also try to refactor Jenkins configuration into GitLab CI/CD,\nusing `/refactor into GitLab CI/CD configuration`. \n\n\n### Fun exercises \n\n\nTry to convince Chat to behave like Clippy.\n\n![Chat\nprompt](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image22_aHR0cHM6_1750097645366.png)\n\n\nAsk about GitLab's mission: \"Everyone can contribute.\"\n\n\n![Chat\nprompt](https://res.cloudinary.com/about-gitlab-com/image/upload/v1750097645/Blog/Content%20Images/Blog/Content%20Images/image33_aHR0cHM6_1750097645367.png)\n\n\n### Learn more\n\n\nThere are many different environments and challenges out there. We have\nupdated the [GitLab Duo Chat\ndocumentation](https://docs.gitlab.com/ee/user/gitlab_duo_chat.html) with\nmore practical examples, and added a new [GitLab Duo\nexamples](https://docs.gitlab.com/ee/user/gitlab_duo_examples.html) section\nwith deep dives into AI-powered DevSecOps workflows, including Chat.\n\n\n> Want to get going with GitLab Duo Chat? [Start your free trial\ntoday](https://about.gitlab.com/solutions/gitlab-duo-pro/self-managed-and-gitlab-dedicated-trial/).\n",[803,719,9,695],{"slug":4387,"featured":91,"template":698},"10-best-practices-for-using-ai-powered-gitlab-duo-chat","content:en-us:blog:10-best-practices-for-using-ai-powered-gitlab-duo-chat.yml","10 Best Practices For Using Ai Powered Gitlab Duo Chat","en-us/blog/10-best-practices-for-using-ai-powered-gitlab-duo-chat.yml","en-us/blog/10-best-practices-for-using-ai-powered-gitlab-duo-chat",22,[678,703,727,745,767,788,812,833,854],1759347889021]