diff --git a/CarneiroTech.csproj b/CarneiroTech.csproj index cb01c91..97fbad7 100644 --- a/CarneiroTech.csproj +++ b/CarneiroTech.csproj @@ -11,4 +11,10 @@ + + + PreserveNewest + + + diff --git a/Content/Cases/pt/asp-to-dotnet-migration.md b/Content/Archive/Consulting/asp-to-dotnet-migration.md similarity index 100% rename from Content/Cases/pt/asp-to-dotnet-migration.md rename to Content/Archive/Consulting/asp-to-dotnet-migration.md diff --git a/Content/Cases/pt/cnpj-fast-process.md b/Content/Archive/Consulting/cnpj-fast-process.md similarity index 100% rename from Content/Cases/pt/cnpj-fast-process.md rename to Content/Archive/Consulting/cnpj-fast-process.md diff --git a/Content/Cases/pt/cnpj-migration-database.md b/Content/Archive/Consulting/cnpj-migration-database.md similarity index 100% rename from Content/Cases/pt/cnpj-migration-database.md rename to Content/Archive/Consulting/cnpj-migration-database.md diff --git a/Content/Cases/pt/industrial-learning-platform.md b/Content/Archive/Consulting/industrial-learning-platform.md similarity index 100% rename from Content/Cases/pt/industrial-learning-platform.md rename to Content/Archive/Consulting/industrial-learning-platform.md diff --git a/Content/Cases/pt/pharma-digital-transformation.md b/Content/Archive/Consulting/pharma-digital-transformation.md similarity index 100% rename from Content/Cases/pt/pharma-digital-transformation.md rename to Content/Archive/Consulting/pharma-digital-transformation.md diff --git a/Content/Cases/pt/sap-integration-healthcare.md b/Content/Archive/Consulting/sap-integration-healthcare.md similarity index 100% rename from Content/Cases/pt/sap-integration-healthcare.md rename to Content/Archive/Consulting/sap-integration-healthcare.md diff --git a/Content/Cases/Retail/vostro-5470-resgate.md b/Content/Cases/Retail/vostro-5470-resgate.md new file mode 100644 index 0000000..176704e --- /dev/null +++ b/Content/Cases/Retail/vostro-5470-resgate.md @@ -0,0 +1,61 @@ +--- +title: "O Resgate do 
Vostro 5470: De 'Lixo Eletrônico' a Máquina de Performance" +slug: "vostro-5470-resgate-performance" +summary: "Um ultrabook condenado por superaquecimento e lentidão extrema foi transformado em uma ferramenta de trabalho ágil, economizando a compra de um novo notebook." +client: "Cliente Particular (SBC)" +device_model: "Dell Vostro 5470" +estimated_savings: 3500.00 +category: "Resgate" +thumbnail: "/assets/img/portfolio/vostro-antes.jpg" +image: "/assets/img/portfolio/vostro-depois.jpg" +tags: ["Dell", "SSD Upgrade", "Limpeza Técnica", "Reparo de Carcaça"] +featured: true +order: 1 +date: 2024-02-15 +seo_title: "Case de Sucesso: Reparo e Upgrade Dell Vostro 5470 em SBC" +seo_description: "Veja como economizamos R$ 3.500,00 para um cliente de São Bernardo do Campo recuperando um Dell Vostro 5470." +seo_keywords: "Dell Vostro 5470, reparo notebook SBC, upgrade SSD, Golden Square Shopping" +--- + +
+ Comparativo Antes e Depois do Resgate +

À esquerda: Equipamento com danos estruturais e térmicos. À direita: Equipamento restaurado e atualizado.

+
+ +## O Desafio: Um veredito de "Morte Súbita" + +O cliente chegou à **CarneiroTech** com um Dell Vostro 5470 que apresentava o clássico problema deste modelo: superaquecimento severo, dobradiças quebradas e uma lentidão que tornava o Windows 10 inutilizável. Em outras assistências, o veredito foi direto: *"Não vale a pena consertar, compre um novo"*. + +Com o preço dos notebooks atuais com performance equivalente girando em torno de **R$ 4.000,00**, o desafio era provar que a engenharia de precisão e os componentes certos poderiam dar uma segunda vida ao equipamento. + +## Nossa Abordagem: Storytelling Técnico + +### 1. Diagnóstico Térmico e Estrutural +Identificamos que a pasta térmica original estava cristalizada, impedindo a dissipação de calor. As dobradiças, presas em suportes plásticos frágeis, estavam forçando a carcaça. + +### 2. Intervenção Cirúrgica +- **Reparo de Carcaça:** Reconstrução das torres de fixação com resina industrial, garantindo que o abre-e-fecha fosse suave novamente. +- **Cooling Mod:** Limpeza completa do sistema de exaustão e aplicação de pasta térmica de alta condutividade (Arctic Silver). + +### 3. O "Pulo do Gato": Upgrade de Performance +Substituímos o HD mecânico de 5400 RPM por um **SSD SATA III de alta performance** e expandimos a memória RAM. O resultado? O boot que levava 3 minutos passou para **12 segundos**. + +## Resultado: Economia Real no Bolso + +O investimento total do cliente no resgate foi inferior a 15% do valor de um notebook novo. + +- **Custo do Reparo/Upgrade:** ~R$ 500,00 +- **Valor de um Notebook Novo Equivalente:** ~R$ 4.000,00 +- **Economia Estimada:** **R$ 3.500,00** + +Hoje, o Vostro 5470 roda as ferramentas de trabalho do cliente com fluidez total, sem aquecer e com a estrutura reforçada para durar mais alguns anos. + +--- + +### Precisa de um diagnóstico real para o seu equipamento? +Não descarte seu investimento antes de uma análise técnica especializada. 
+ +**📍 Onde estamos:** Golden Square Shopping, São Bernardo do Campo. +**🚀 Foco:** Performance máxima com o melhor custo-benefício. + +[**AGENDAR AVALIAÇÃO AGORA**](/#contact) diff --git a/Content/Cases/en/asp-to-dotnet-migration.md b/Content/Cases/en/asp-to-dotnet-migration.md deleted file mode 100644 index f63148b..0000000 --- a/Content/Cases/en/asp-to-dotnet-migration.md +++ /dev/null @@ -1,329 +0,0 @@ ---- -title: "ASP 3.0 to .NET Core Migration - Cargo Tracking System" -slug: "asp-to-dotnet-migration" -summary: "Tech Lead in gradual migration of mission-critical ASP 3.0 system to .NET Core, with dual-write data synchronization and cost reduction of $20k/year in mapping APIs." -client: "Logistics and Tracking Company" -industry: "Logistics & Security" -timeline: "12 months (complete migration)" -role: "Tech Lead & Solution Architect" -image: "" -tags: - - ASP Classic - - .NET Core - - SQL Server - - Migration - - Tech Lead - - OSRM - - APIs - - Architecture -featured: true -order: 2 -date: 2015-06-01 -seo_title: "ASP 3.0 to .NET Core Migration - Carneiro Tech Case Study" -seo_description: "Case study of gradual ASP 3.0 to .NET Core migration with data synchronization and $20k/year cost savings in API expenses." -seo_keywords: "ASP migration, .NET Core, legacy modernization, SQL Server, OSRM, tech lead, routing API" ---- - -## Overview - -Mission-critical cargo monitoring system for high-value loads (LED TVs worth $600 each, shipments up to 1000 units) using GPS satellite tracking. The application covered the entire lifecycle: from driver registration and evaluation (police background checks) to real-time monitoring and final delivery. - -**Main challenge:** Migrate legacy ASP 3.0 application to .NET Core with zero downtime, maintaining 24/7 critical operations. 
- ---- - -## Challenge - -### Critical Legacy System - -The company operated a mission-critical system in **ASP 3.0** (Classic ASP) that couldn't stop: - -**Legacy technology:** -- ASP 3.0 (1998 technology) -- SQL Server 2005 -- On-premises failover cluster (perfectly capable of handling the load) -- Integration with GPS satellite trackers -- Google Maps API (cost: **$20,000/year** just for route calculation) - -**Constraints:** -- 24/7 system operation with high-value cargo -- No downtime allowed during migration -- Multiple interdependent modules -- Team needed to continue developing features during migration - ---- - -## Solution Architecture - -### Phase 1: Infrastructure Preparation (Months 1-3) - -#### Database Upgrade -``` -SQL Server 2005 → SQL Server 2014 -- Full backup and validation -- Stored procedure migration -- Index optimization -- Performance testing -``` - -#### Dual-Write Synchronization Strategy - -I implemented a **bidirectional synchronization system** that allowed: - -1. **New modules (.NET Core)** wrote to the new database -2. **Automatic trigger** synchronized data to the legacy database -3. **Old modules (ASP 3.0)** continued working normally -4. **Zero downtime** throughout the entire migration - -```csharp -// Synchronization implementation example -public class DualWriteService -{ - public async Task SaveDriver(Driver driver) - { - // Write to new database (.NET Core) - await _newDbContext.Drivers.AddAsync(driver); - await _newDbContext.SaveChangesAsync(); - - // SQL trigger automatically syncs to legacy database - // ASP 3.0 modules continue functioning - } -} -``` - -**Why this approach?** -- Enabled **module-by-module** migration -- Team could continue developing -- Simple rollback if needed -- Reduced operational risk - ---- - -### Phase 2: Gradual Module Migration (Months 4-12) - -I migrated modules in increasing complexity order: - -**Migration sequence:** -1. ✅ Basic registrations (drivers, vehicles) -2. 
✅ Risk assessment (police database integration) -3. ✅ Cargo and route management -4. ✅ Real-time GPS monitoring -5. ✅ Alerts and notifications -6. ✅ Reports and analytics - -**Migrated application stack:** -- `.NET Core 1.0` (2015-2016 was the beginning of .NET Core) -- `Entity Framework Core` -- `SignalR` for real-time monitoring -- `SQL Server 2014` -- RESTful APIs - ---- - -### Phase 3: Cost Reduction with OSRM ($20k/year Savings) - -#### Problem: Prohibitive Google Maps Cost - -The company spent **$20,000/year** just on Google Maps Directions API for truck route calculation. - -#### Solution: OSRM (Open Source Routing Machine) - -I implemented a solution based on **OSRM** (open-source routing engine): - -**Solution architecture:** - -``` -┌─────────────────┐ -│ Frontend │ -│ (Leaflet.js) │ -└────────┬────────┘ - │ - ▼ -┌─────────────────┐ ┌──────────────┐ -│ API Wrapper │─────▶│ OSRM Server │ -│ (.NET Core) │ │ (self-hosted)│ -└────────┬────────┘ └──────────────┘ - │ - ▼ -┌─────────────────┐ -│ Google Maps │ -│ (display only) │ -└─────────────────┘ -``` - -**Implementation:** - -1. **OSRM Server configured** on own server -2. **User-friendly API wrapper** in .NET Core that: - - Received origin/destination - - Queried OSRM (free) - - Returned all route points - - Formatted for frontend -3. 
**Frontend** drew the route on Google Maps (visualization only, no routing API) - -```csharp -[HttpGet("route")] -public async Task GetRoute(double originLat, double originLng, - double destLat, double destLng) -{ - // Query OSRM (free) - var osrmResponse = await _osrmClient.GetRouteAsync( - originLat, originLng, destLat, destLng); - - // Return formatted points for frontend - return Ok(new { - points = osrmResponse.Routes[0].Geometry.Coordinates, - distance = osrmResponse.Routes[0].Distance, - duration = osrmResponse.Routes[0].Duration - }); -} -``` - -**Frontend with Leaflet:** - -```javascript -// Draw route on map (Google Maps only for tiles) -L.polyline(routePoints, {color: 'red'}).addTo(map); -``` - -#### OpenStreetMap Attempt - -I tried to also replace Google Maps (tiles) with **OpenStreetMap**, which worked technically, but: - -❌ **Users didn't like** the appearance -❌ Preferred the familiar Google Maps interface - -✅ **Decision:** Keep Google Maps for visualization only (no routing API cost) - -**Result:** Savings of **~$20,000/year** while maintaining route quality. 
- ---- - -## Results & Impact - -### Complete Migration in 12 Months - -✅ **100% of modules** migrated from ASP 3.0 to .NET Core -✅ **Zero downtime** throughout the entire migration -✅ **Productive team** throughout the process -✅ Faster and more scalable system - -### Cost Reduction - -💰 **$20,000/year saved** by replacing Google Maps Directions API -📉 **Optimized infrastructure** with SQL Server 2014 - -### Technical Improvements - -🚀 **Performance:** .NET Core application 3x faster than ASP 3.0 -🔒 **Security:** Modern stack with active security patches -🛠️ **Maintainability:** Modern C# code vs legacy VBScript -📊 **Monitoring:** SignalR for more efficient real-time tracking - ---- - -## Unexecuted Phase: Microservices & Cloud - -### Initial Planning - -I participated in the **design and conception** of the second phase (never executed): - -**Planned architecture:** -- Migration to **Azure** (cloud was just starting in 2015) -- Break into **microservices**: - - Authentication service - - GPS/tracking service - - Routing service - - Notification service -- **Event-driven architecture** with message queues - -**Why it wasn't executed:** - -I left the company right after completing the .NET Core migration. The second phase was planned but not implemented by me. - ---- - -## Tech Stack - -`ASP 3.0` `VBScript` `.NET Core 1.0` `C#` `Entity Framework Core` `SQL Server 2005` `SQL Server 2014` `OSRM` `Leaflet.js` `Google Maps` `SignalR` `REST APIs` `GPS/Satellite` `Migration Strategy` `Dual-Write Pattern` - ---- - -## Key Decisions & Trade-offs - -### Why dual-write synchronization? - -**Alternatives considered:** -1. ❌ Big Bang migration (too risky) -2. ❌ Keep everything in ASP 3.0 (unsustainable) -3. ✅ **Gradual migration with sync** (chosen) - -**Rationale:** -- Critical system couldn't stop -- Enabled module-by-module rollback -- Team remained productive - -### Why OSRM instead of others? 
- -**Alternatives:** -- Google Maps: $20k/year ❌ -- Mapbox: Paid license ❌ -- GraphHopper: Complex setup ❌ -- **OSRM: Open-source, fast, configurable** ✅ - -### Why not OpenStreetMap for tiles? - -**UX-based decision:** -- Technically worked perfectly -- Users preferred familiar Google interface -- **Compromise:** Google Maps for visualization (free) + OSRM for routing (free) - ---- - -## Lessons Learned - -### 1. Gradual Migration > Big Bang - -Migrating module by module with synchronization enabled: -- Continuous learning -- Route adjustments during the process -- Team and stakeholder confidence - -### 2. Open Source Can Save a Lot - -OSRM saved **$20k/year** without quality loss. But requires: -- Expertise to configure -- Own infrastructure -- Continuous maintenance - -### 3. UX > Technology Sometimes - -OpenStreetMap was technically superior (free), but users preferred Google Maps. **Lesson:** Listen to end users. - -### 4. Plan for Cloud, but Validate ROI - -In 2015, cloud was just starting. On-premises infrastructure (SQL Server cluster) was perfectly capable. **Don't force cloud if there's no clear benefit.** - ---- - -## Context: Why 2015 Was a Special Moment? - -**State of technology in 2015:** - -- ☁️ **Cloud in early stages:** AWS existed, Azure growing, but low corporate adoption -- 🆕 **.NET Core 1.0 launched** in June 2016 (we used RC during the project) -- 📱 **Microservices:** New concept, Docker in early adoption -- 🗺️ **Google Maps dominant:** Paid APIs, few mature open-source alternatives - -**Challenges of the time:** -- Non-existent ASP→.NET migration tools -- Scarce .NET Core documentation (version 1.0!) -- Architecture patterns still consolidating - -This project was **pioneering** in adopting .NET Core right at the beginning, when most were migrating to .NET Framework 4.x. - ---- - -**Result:** Successful migration of 24/7 critical system, $20k/year savings, and solid foundation for future evolution. - -[Want to discuss a similar migration? 
Get in touch](#contact) diff --git a/Content/Cases/en/cnpj-fast-process.md b/Content/Cases/en/cnpj-fast-process.md deleted file mode 100644 index cd49beb..0000000 --- a/Content/Cases/en/cnpj-fast-process.md +++ /dev/null @@ -1,382 +0,0 @@ ---- -title: "CNPJ Fast - Alphanumeric CNPJ Migration Process" -slug: "cnpj-fast-process" -summary: "Creation of structured methodology for migrating applications to the new Brazilian alphanumeric CNPJ format, sold to insurance company and collection agency." -client: "Consulting Firm (Internal)" -industry: "Consulting & Digital Transformation" -timeline: "3 months (process creation)" -role: "Solution Architect & Process Designer" -image: "" -tags: - - Process Design - - CNPJ - - Migration Strategy - - Regulatory Compliance - - Consulting - - Sales Enablement -featured: true -order: 3 -date: 2024-09-01 -seo_title: "CNPJ Fast - Alphanumeric CNPJ Migration Methodology" -seo_description: "Case study of creating a structured process for migrating to Brazilian alphanumeric CNPJ, sold to insurance company and collection agency." -seo_keywords: "CNPJ alphanumeric, migration process, regulatory compliance, consulting, methodology" ---- - -## Overview - -With the introduction of **alphanumeric CNPJ** by the Brazilian Federal Revenue Service, companies faced the challenge of adapting their legacy applications that stored CNPJ as numeric fields (`bigint`, `numeric`, `int`). - -I created **CNPJ Fast**, a structured methodology to assess, plan, and execute CNPJ migrations in corporate applications and databases. - -**Result:** Process sold to **2 clients** (insurance company and collection agency) before implementation. 
- ---- - -## Challenge - -### Complex Regulatory Change - -**Regulatory context:** -- Brazilian Federal Revenue Service introduced **alphanumeric CNPJ** -- CNPJ is no longer just numbers (14 digits) -- Now accepts **letters and numbers** (alphanumeric format) - -**Impact on companies:** - -```sql --- BEFORE: Numeric CNPJ -CNPJ BIGINT -- 12345678000190 - --- AFTER: Alphanumeric CNPJ -CNPJ VARCHAR(18) -- 12.ABC.678/0001-90 -``` - -**Identified problems:** - -1. **Database:** `BIGINT`, `NUMERIC`, `INT` columns don't support characters -2. **Primary keys:** CNPJ used as PK in several tables -3. **Foreign keys:** Relationships between tables -4. **Volume:** Millions of records to migrate -5. **Applications:** Validations, masks, business rules -6. **Testing:** Ensure integrity after migration -7. **Downtime:** Limited maintenance windows - -**Without a structured process**, companies risked: -- Data loss -- Database inconsistencies -- Broken applications -- Extended downtime - ---- - -## Solution: CNPJ Fast Process - -### 5-Phase Methodology - -I designed a structured process that could be replicated across different clients: - -``` -┌─────────────────────────────────────────────┐ -│ PHASE 1: DISCOVERY & ASSESSMENT │ -│ - Application inventory │ -│ - Database schema analysis │ -│ - Identification of impacted tables │ -│ - Data volume estimation │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ PHASE 2: IMPACT ANALYSIS │ -│ - Dependency mapping │ -│ - Analysis of primary/foreign keys │ -│ - Identification of business rules │ -│ - Risk assessment │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ PHASE 3: MIGRATION PLANNING │ -│ - Migration strategy (phased commits) │ -│ - Automated SQL scripts │ -│ - Rollback plan │ -│ - Maintenance windows │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ PHASE 4: 
EXECUTION │ -│ - Batch data migration │ -│ - Application updates │ -│ - Integration testing │ -│ - Integrity validation │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ PHASE 5: VALIDATION & GO-LIVE │ -│ - Regression testing │ -│ - Performance validation │ -│ - Coordinated go-live │ -│ - Post-migration monitoring │ -└─────────────────────────────────────────────┘ -``` - ---- - -### Phase 1: Discovery & Assessment - -**Objective:** Understand the complete migration scope - -**Deliverables:** - -1. **Application Inventory** - - List of applications using CNPJ - - Technologies (ASP 3.0, VB6, .NET, microservices) - - Criticality of each application - -2. **Schema Analysis** - ```sql - -- Automated discovery script - SELECT - t.TABLE_SCHEMA, - t.TABLE_NAME, - c.COLUMN_NAME, - c.DATA_TYPE, - c.CHARACTER_MAXIMUM_LENGTH - FROM INFORMATION_SCHEMA.TABLES t - JOIN INFORMATION_SCHEMA.COLUMNS c - ON t.TABLE_NAME = c.TABLE_NAME - WHERE c.COLUMN_NAME LIKE '%CNPJ%' - AND c.DATA_TYPE IN ('bigint', 'numeric', 'int') - ORDER BY t.TABLE_SCHEMA, t.TABLE_NAME; - ``` - -3. 
**Volume Estimation** - - Total records per table - - Size in GB - - Estimated migration time - -**Example output:** - -| Table | Column | Current Type | Records | Criticality | -|--------|--------|------------|-----------|-------------| -| Clients | CNPJ_Client | BIGINT | 8,000,000 | High | -| Suppliers | CNPJ_Supplier | NUMERIC(14) | 2,500,000 | Medium | -| Transactions | CNPJ_Payer | BIGINT | 90,000,000 | Critical | - ---- - -### Phase 2: Impact Analysis - -**Objective:** Map all dependencies and risks - -**Key analysis:** - -```sql --- Identifies PKs and FKs involving CNPJ -SELECT - fk.name AS FK_Name, - tp.name AS Parent_Table, - cp.name AS Parent_Column, - tr.name AS Referenced_Table, - cr.name AS Referenced_Column -FROM sys.foreign_keys fk -INNER JOIN sys.tables tp ON fk.parent_object_id = tp.object_id -INNER JOIN sys.foreign_key_columns fkc ON fk.object_id = fkc.constraint_object_id -INNER JOIN sys.columns cp ON fkc.parent_column_id = cp.column_id - AND fkc.parent_object_id = cp.object_id -INNER JOIN sys.tables tr ON fk.referenced_object_id = tr.object_id -INNER JOIN sys.columns cr ON fkc.referenced_column_id = cr.column_id - AND fkc.referenced_object_id = cr.object_id -WHERE cp.name LIKE '%CNPJ%' OR cr.name LIKE '%CNPJ%'; -``` - -**Risk Assessment:** - -- **High:** Tables with CNPJ as PK and >10M records -- **Medium:** Tables with FK to CNPJ -- **Low:** Tables without constraints - ---- - -### Phase 3: Migration Planning - -**Gradual migration strategy:** - -To avoid database locks, I designed a **phased commits** strategy: - -```sql --- Strategy for large tables (>1M records) - --- 1. Add new VARCHAR column -ALTER TABLE Clients -ADD CNPJ_Client_New VARCHAR(18) NULL; - --- 2. 
Migration in batches (phased commits) -DECLARE @BatchSize INT = 100000; -DECLARE @RowsAffected INT = 1; - -WHILE @RowsAffected > 0 -BEGIN - UPDATE TOP (@BatchSize) Clients - SET CNPJ_Client_New = FORMAT(CNPJ_Client, '00000000000000') - WHERE CNPJ_Client_New IS NULL; - - SET @RowsAffected = @@ROWCOUNT; - WAITFOR DELAY '00:00:01'; -- Pause between batches -END; - --- 3. Remove constraints (PKs, FKs) -ALTER TABLE Clients DROP CONSTRAINT PK_Clients; - --- 4. Rename columns -EXEC sp_rename 'Clients.CNPJ_Client', 'CNPJ_Client_Old', 'COLUMN'; -EXEC sp_rename 'Clients.CNPJ_Client_New', 'CNPJ_Client', 'COLUMN'; - --- 5. Recreate constraints -ALTER TABLE Clients -ADD CONSTRAINT PK_Clients PRIMARY KEY (CNPJ_Client); - --- 6. Remove old column (after validation) -ALTER TABLE Clients DROP COLUMN CNPJ_Client_Old; -``` - -**Why this approach?** - -- Avoids locking entire table -- Allows pausing/resuming migration -- Minimizes production impact -- Facilitates rollback if needed - ---- - -### Phases 4 & 5: Execution and Validation - -**Execution checklist:** - -- [ ] Complete database backup -- [ ] Execute migration scripts in batches -- [ ] Update applications (validations, masks) -- [ ] Integration testing -- [ ] Referential integrity validation -- [ ] Performance testing -- [ ] Coordinated go-live -- [ ] 24h post-migration monitoring - ---- - -## Sales Enablement: UX Presentation - -**Collaboration with UX Manager:** - -The company's UX manager created an **impactful visual presentation** of the CNPJ Fast process: - -**Presentation content:** -- Infographics of the 5-phase process -- Examples of time/cost estimates -- Use cases (insurance, banks, fintechs) -- Executive checklist -- Documentation templates - -**Result:** Presentation used by sales team for prospecting. 
- ---- - -## Results & Impact - -### Sales Achieved - -**Client 1: Insurance Company** -- Stack: ASP 3.0, VB6 components, .NET, microservices -- Scope: Complete legacy application migration -- Status: **Project sold** (execution by another team) -- Value: [Confidential] - -**Client 2: Collection Agency** -- Scope: Database migration (~100M records) -- Status: **Project sold and in execution** (by me) -- Particularity: Process was **restructured** to meet specific needs -- See complete case study: [CNPJ Migration - 100M Records](/cases/cnpj-migration-database) - ---- - -### Business Impact - -**2 projects sold** before first execution -**Replicable process** for new clients -**Positioning** as specialist in regulatory migrations -**Knowledge base** for future similar projects - ---- - -### Technical Impact - -**Tested methodology** in real scenarios -**Reusable documentation** (scripts, checklists, templates) -**Acceleration** of similar projects (from weeks to days) - ---- - -## Tech Stack - -`SQL Server` `Migration Strategy` `Process Design` `Regulatory Compliance` `ASP 3.0` `VB6` `.NET` `Microservices` `Batch Processing` `Database Optimization` - ---- - -## Key Decisions & Trade-offs - -### Why structured process? - -**Alternatives:** -1. Ad-hoc approach per project -2. Manual consulting without methodology -3. **Replicable and scalable process** - -**Justification:** -- Reduces Discovery time -- Standardizes deliveries -- Facilitates sales (ready presentation) -- Allows execution by different teams - -### Why separate into 5 phases? - -**Benefits:** -- Client can approve phase by phase -- Allows adjustments during process -- Facilitates risk management -- Incremental deliveries - ---- - -## Lessons Learned - -### 1. UX/Presentation Matters for Sales - -The visual presentation made by the UX manager was **crucial** to closing both contracts. Good technical process + poor presentation = no sales. - -### 2. 
Process Sells, Not Just Execution - -Creating a **documented methodology** has more commercial value than just offering "consulting hours." - -### 3. Each Client is Unique - -The client requested **process restructuring**. A good process should be: -- Structured enough to be replicable -- Flexible enough to customize - -### 4. Multidisciplinary Collaboration - -Working with UX manager (presentations) + sales team (sales) + technical (execution) = success. - ---- - -## Next Steps - -**Future opportunities:** - -1. **Expansion:** Offer CNPJ Fast to more sectors (banks, fintechs, retail) -2. **Product:** Transform into automated tool (SaaS) -3. **Training:** Enable clients' internal teams -4. **Evolution:** Adapt process for other regulatory migrations (PIX, Open Banking) - ---- - -**Result:** Structured methodology that became a sellable product, generating revenue before the first technical execution. - -[Want to implement CNPJ Fast in your company? Get in touch](#contact) diff --git a/Content/Cases/en/cnpj-migration-database.md b/Content/Cases/en/cnpj-migration-database.md deleted file mode 100644 index ed08981..0000000 --- a/Content/Cases/en/cnpj-migration-database.md +++ /dev/null @@ -1,469 +0,0 @@ ---- -title: "Alphanumeric CNPJ Migration - 100 Million Records" -slug: "cnpj-migration-database" -summary: "Execution of massive CNPJ migration from numeric to alphanumeric in database with ~100M records, using phased commit strategy to avoid database locks." 
-client: "Collection Agency" -industry: "Collections & Financial Services" -timeline: "In execution" -role: "Database Architect & Tech Lead" -image: "" -tags: - - SQL Server - - Database Migration - - CNPJ - - Performance Optimization - - Batch Processing - - Big Data -featured: true -order: 4 -date: 2024-11-01 -seo_title: "Alphanumeric CNPJ Migration - 100M Records | Carneiro Tech" -seo_description: "Case study of massive CNPJ migration in database with 100 million records using phased commits and performance optimizations." -seo_keywords: "database migration, SQL Server, CNPJ, batch processing, performance optimization, phased commits" ---- - -## Overview - -A collection agency that works with transitory data databases (no proprietary software) needs to adapt its systems to the new Brazilian **alphanumeric CNPJ** format. - -**Main challenge:** Migrate ~**100 million records** in tables with `BIGINT` and `NUMERIC` columns to `VARCHAR`, without locking the production database. - -**Status:** Project in execution (migration script preparation). - ---- - -## Challenge - -### Massive Data Volume - -**Company context:** -- Collection agency (does not develop proprietary software) -- Works with **transitory data** (high turnover) -- SQL Server database with critical volume - -**Initial analysis revealed:** - -| Table | Column | Current Type | Records | Size | -|--------|--------|------------|-----------|---------| -| Debtors | CNPJ_Debtor | BIGINT | 8,000,000 | 60 GB | -| Transactions | CNPJ_Payer | NUMERIC(14) | 90,000,000 | 1.2 TB | -| Companies | CNPJ_Company | BIGINT | 2,500,000 | 18 GB | -| **TOTAL** | - | - | **~100,000,000** | **~1.3 TB** | - -**Identified problems:** - -1. **Tables with 8M+ rows** using `BIGINT` for CNPJ -2. **90 million records** in transactions table -3. **CNPJ as primary key** in some tables -4. **Foreign keys** relating multiple tables -5. **Impossibility of extended downtime** (24/7 operation) -6. 
**Disk space restrictions** (requires efficient strategy) - ---- - -## Strategic Decision: Phased Commits - -### Why NOT do ALTER COLUMN directly? - -**Naive approach (DOESN'T work):** - -```sql --- NEVER DO THIS ON LARGE TABLES -ALTER TABLE Transactions -ALTER COLUMN CNPJ_Payer VARCHAR(18); -``` - -**Problems:** -- Locks entire table during conversion -- Can take hours/days on large tables -- Blocks all operations (INSERT, UPDATE, SELECT) -- Risk of timeout or failure mid-operation -- Complex rollback if something goes wrong - ---- - -### Chosen Strategy: Column Swap with Phased Commits - -**Based on previous experience**, I decided to use a gradual approach: - -``` -┌─────────────────────────────────────────────┐ -│ 1. Create new VARCHAR column at END │ -│ (fast operation, doesn't lock table) │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ 2. UPDATE in batches (phased commits) │ -│ - 100k records at a time │ -│ - Pause between batches (avoid lock) │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ 3. Remove PKs and FKs │ -│ (after 100% migrated) │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ 4. Rename columns (swap) │ -│ - CNPJ → CNPJ_Old │ -│ - CNPJ_New → CNPJ │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ 5. Recreate PKs/FKs with new column │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ 6. 
Validation and old column deletion │ -└─────────────────────────────────────────────┘ -``` - -**Why this approach?** - -**No complete table lock** (incremental operation) -**Can pause/resume** at any time -**Real-time progress monitoring** -**Simple rollback** (just drop new column) -**Minimizes production impact** (small commits) - -**Decision based on:** -- Previous experience with large volume migrations -- Knowledge of SQL Server locks -- Need for zero downtime - -**Note:** This decision was made **without consulting AI** - based purely on practical experience from previous projects. - ---- - -## Implementation Details - -### Phase 1: Create New Column - -```sql --- Fast operation (metadata change only) -ALTER TABLE Transactions -ADD CNPJ_Payer_New VARCHAR(18) NULL; - --- Add temporary index to speed up lookups -CREATE NONCLUSTERED INDEX IX_Temp_CNPJ_New -ON Transactions(CNPJ_Payer_New) -WHERE CNPJ_Payer_New IS NULL; -``` - -**Estimated time:** ~1 second (independent of table size) - ---- - -### Phase 2: Batch Migration (Core Strategy) - -```sql --- Migration script with phased commits -DECLARE @BatchSize INT = 100000; -- 100k records per batch -DECLARE @RowsAffected INT = 1; -DECLARE @TotalProcessed INT = 0; -DECLARE @StartTime DATETIME = GETDATE(); - -WHILE @RowsAffected > 0 -BEGIN - BEGIN TRANSACTION; - - -- Update batch of 100k records not yet migrated - UPDATE TOP (@BatchSize) Transactions - SET CNPJ_Payer_New = RIGHT('00000000000000' + CAST(CNPJ_Payer AS VARCHAR), 14) - WHERE CNPJ_Payer_New IS NULL; - - SET @RowsAffected = @@ROWCOUNT; - SET @TotalProcessed = @TotalProcessed + @RowsAffected; - - COMMIT TRANSACTION; - - -- Progress log - PRINT 'Processed: ' + CAST(@TotalProcessed AS VARCHAR) + ' rows. 
Batch: ' + CAST(@RowsAffected AS VARCHAR); - PRINT 'Elapsed time: ' + CAST(DATEDIFF(SECOND, @StartTime, GETDATE()) AS VARCHAR) + ' seconds'; - - -- Pause between batches (reduces contention) - WAITFOR DELAY '00:00:01'; -- 1 second between batches -END; - -PRINT 'Migration completed! Total rows: ' + CAST(@TotalProcessed AS VARCHAR); -``` - -**Configurable parameters:** - -- `@BatchSize`: 100k (balanced between performance and lock time) - - Too small = many transactions, overhead - - Too large = prolonged lock, production impact -- `WAITFOR DELAY`: 1 second (gives time for other queries to run) - -**Time estimates:** - -| Records | Batch Size | Estimated Time | -|-----------|------------|----------------| -| 8,000,000 | 100,000 | ~2-3 hours | -| 90,000,000 | 100,000 | ~20-24 hours | - -**Advantages:** -- Doesn't freeze application -- Other queries can run between batches -- Can pause (Ctrl+C) and resume later (WHERE NULL picks up where it left off) -- Real-time progress log - ---- - -### Phase 3: Constraint Removal - -```sql --- Identifies all PKs and FKs involving the column -SELECT name -FROM sys.key_constraints -WHERE type = 'PK' - AND parent_object_id = OBJECT_ID('Transactions') - AND COL_NAME(parent_object_id, parent_column_id) = 'CNPJ_Payer'; - --- Remove PKs -ALTER TABLE Transactions -DROP CONSTRAINT PK_Transactions_CNPJ; - --- Remove FKs (tables that reference) -ALTER TABLE Payments -DROP CONSTRAINT FK_Payments_Transactions; -``` - -**Estimated time:** ~10 minutes (depends on how many constraints exist) - ---- - -### Phase 4: Column Swap (Renaming) - -```sql --- Rename old column to _Old -EXEC sp_rename 'Transactions.CNPJ_Payer', 'CNPJ_Payer_Old', 'COLUMN'; - --- Rename new column to original name -EXEC sp_rename 'Transactions.CNPJ_Payer_New', 'CNPJ_Payer', 'COLUMN'; - --- Change to NOT NULL (after validating 100% populated) -ALTER TABLE Transactions -ALTER COLUMN CNPJ_Payer VARCHAR(18) NOT NULL; -``` - -**Estimated time:** ~1 second (metadata change) - ---- 
- -### Phase 5: Constraint Recreation - -```sql --- Recreate PK with new VARCHAR column -ALTER TABLE Transactions -ADD CONSTRAINT PK_Transactions_CNPJ -PRIMARY KEY CLUSTERED (CNPJ_Payer); - --- Recreate FKs -ALTER TABLE Payments -ADD CONSTRAINT FK_Payments_Transactions -FOREIGN KEY (CNPJ_Payer) REFERENCES Transactions(CNPJ_Payer); -``` - -**Estimated time:** ~30-60 minutes (depends on volume) - ---- - -### Phase 6: Validation and Cleanup - -```sql --- Validate that 100% was migrated -SELECT COUNT(*) -FROM Transactions -WHERE CNPJ_Payer IS NULL OR CNPJ_Payer = ''; - --- Validate referential integrity -DBCC CHECKCONSTRAINTS WITH ALL_CONSTRAINTS; - --- If everything OK, remove old column -ALTER TABLE Transactions -DROP COLUMN CNPJ_Payer_Old; - --- Remove temporary index -DROP INDEX IX_Temp_CNPJ_New ON Transactions; -``` - ---- - -## CNPJ Fast Process Customization - -### Differences vs. Original Process - -The original **CNPJ Fast** process was **restructured** for this client: - -**Main changes:** - -| Aspect | Original CNPJ Fast | Client (Customized) | -|---------|-------------------|---------------------| -| **Focus** | Applications + DB | DB only (no proprietary software) | -| **Discovery** | App inventory | Schema analysis only | -| **Execution** | Multiple applications | Massive SQL scripts | -| **Batch Size** | 50k-100k | 100k (optimized for volume) | -| **Monitoring** | Manual + tools | Real-time SQL logs | -| **Rollback** | Complex process | Simple (DROP COLUMN) | - -**Reason for restructuring:** -- Client has no proprietary applications (only consumes data) -- 100% focus on database optimization -- Much larger volume than typical cases (100M vs ~10M) - ---- - -## Tech Stack - -`SQL Server` `T-SQL` `Batch Processing` `Performance Tuning` `Database Optimization` `Migration Scripts` `Phased Commits` `Index Optimization` `Constraint Management` - ---- - -## Key Decisions & Trade-offs - -### Why 100k per batch? 
- -**Performance tests:** - -| Batch Size | Time/Batch | Lock Duration | Contention | -|------------|-------------|---------------|-----------| -| 10,000 | 2s | Low | Minimal | -| 50,000 | 8s | Medium | Acceptable | -| **100,000** | 15s | **Medium** | **Balanced** | -| 500,000 | 90s | High | Production impact | -| 1,000,000 | 180s | Very high | Unacceptable | - -**Choice:** 100k offers best balance between performance and impact. - ---- - -### Why create column at END? - -**SQL Server internals:** -- Add column at end = metadata change (fast) -- Add in middle = page rewrite (slow) -- For large tables, position matters - ---- - -### Why WAITFOR DELAY of 1 second? - -**Without delay:** -- Batch processing consumes 100% of I/O -- Application queries slow down -- Lock escalation may occur - -**With 1s delay:** -- Other queries have window to execute -- Distributed I/O -- User experience preserved - -**Trade-off:** Migration takes +1s per batch (~25% slower), but system remains responsive. - ---- - -## Current Status & Next Steps - -### Current Status (December 2024) - -**Preparation Phase:** -- Discovery complete (100M records identified) -- Migration scripts developed -- Tests in staging environment -- Performance validation in progress -- Awaiting production maintenance window - -### Next Steps - -1. **Complete production backup** -2. **Production execution** (24/7 environment) -3. **Real-time monitoring** during migration -4. **Post-migration validation** (integrity, performance) -5. **Lessons learned documentation** - ---- - -## Lessons Learned (So Far) - -### 1. Previous Experience is Gold - -Decision to use phased commits came from **practical experience** in previous projects, not from documentation or AI. - -**Similar previous situations:** -- E-commerce data migration (50M records) -- Encoding conversion (UTF-8 in 100M+ rows) -- Historical table partitioning - ---- - -### 2. 
"Measure Twice, Cut Once" - -Before executing in production: -- Exhaustive tests in staging -- Scripts validated and reviewed -- Rollback tested -- Time estimates confirmed - -**Preparation time:** 3 weeks -**Execution time:** Estimated at 48 hours - -**Ratio:** 10:1 (preparation vs execution) - ---- - -### 3. Customization > One-Size-Fits-All - -The original CNPJ Fast process needed to be **restructured** for this client. - -**Lesson:** Processes should be: -- Structured enough to repeat -- Flexible enough to adapt - ---- - -### 4. Monitoring is Crucial - -Scripts with **detailed progress logs** allow: -- Estimate remaining time -- Identify bottlenecks -- Pause/resume with confidence -- Report status to stakeholders - -```sql --- Log example -Processed: 10,000,000 rows. Batch: 100,000 -Elapsed time: 3600 seconds (10% complete, ~9h remaining) -``` - ---- - -## Performance Optimizations - -### Optimizations Implemented - -1. **Temporary index WHERE NULL** - - Speeds up lookup of unmigrated records - - Removed after completion - -2. **Optimized batch size** - - Balanced between performance and lock time - -3. **Transaction log management** - ```sql - -- Check log growth - DBCC SQLPERF(LOGSPACE); - - -- Adjust recovery model (if allowed) - ALTER DATABASE MyDatabase SET RECOVERY SIMPLE; - ``` - -4. **Execution during low-load hours** - - Overnight maintenance window - - Weekend (if possible) - ---- - -**Expected result:** Migration of 100 million records in ~48 hours, without significant downtime and with possibility of fast rollback. - -[Need to migrate massive data volumes? 
Get in touch](#contact) diff --git a/Content/Cases/en/industrial-learning-platform.md b/Content/Cases/en/industrial-learning-platform.md deleted file mode 100644 index cef11bb..0000000 --- a/Content/Cases/en/industrial-learning-platform.md +++ /dev/null @@ -1,588 +0,0 @@ ---- -title: "Industrial Training Platform - From Wireframes to Complete System" -slug: "industrial-learning-platform" -summary: "Solution Design for microlearning platform in industrial gas company. Identification of critical unmapped requirements (admin, registrations, exports) before client presentation, avoiding rework and ensuring real usability." -client: "Industrial Gas Company" -industry: "Industrial & Manufacturing" -timeline: "4 months" -role: "Solution Architect & Tech Lead" -image: "" -tags: - - Solution Design - - EdTech - - Learning Platform - - Requirements Analysis - - Tech Lead - - User Stories - - .NET - - Product Design -featured: true -order: 5 -date: 2024-06-01 -seo_title: "Industrial Training Platform - Solution Design" -seo_description: "Case study of Solution Design for microlearning platform, identifying critical requirements before client presentation and leading development to production." -seo_keywords: "solution design, learning platform, microlearning, requirements analysis, tech lead, industrial training" ---- - -## Overview - -Industrial gas company requests platform to train employees using **microlearning** methodology (short and objective content). - -**Initial requirement:** "We just want the structure - track, microlearning, test question and score." - -**Problem:** Incomplete specification that would result in a system **impossible to use** (no way to register content, no administrators, no export of results). - -**Solution:** Critical requirements analysis **before client presentation**, identifying functional gaps and proposing complete solution. 
- ---- - -## Challenge - -### Beautiful Wireframes, Incomplete Functionality - -**Initial situation:** - -UX created beautiful wireframes showing: -- Learning tracks -- Microlearnings (video/text + image) -- Test questions (multiple choice) -- Score per employee - -**Identified problem:** - -Nobody (client, UX, commercial) thought about: - -**How does content enter the system?** -- Who registers tracks? -- Who creates microlearnings? -- Who writes questions? -- Manual interface or import? - -**Who manages the system?** -- Is there admin concept? -- Can HR create admins? -- Can area manager see only their team? - -**How does data leave the system?** -- HR needs reports -- Compliance needs evidence -- How to export data? -- Format: Excel? PDF? API? - -**Real risk:** - -If we developed exactly what was requested: -- System would work technically -- **But would be completely unusable** -- Client would have to pay for rework to include basic CRUD -- Rework + additional cost + frustration - ---- - -## Solution Design Process - -### Step 1: Critical Analysis (Before Presentation) - -**Action taken:** Called meeting with UX **before** presenting to client. - -**Points raised:** - -**"How does the first content enter the system?"** -- UX: "Ah... we didn't think about that. Will you populate the database?" -- Me: "And when client wants to add new track? Will we modify production database?" - -**"Who is the system owner?"** -- UX: "HR, I imagine." -- Me: "Just one person? What if they leave the company? How do they delegate?" - -**"Did HR ask for reports?"** -- UX: "It wasn't mentioned in the briefing." -- Me: "HR always needs reports. It's for compliance (NR, ISO)." - ---- - -### Step 2: Identified Functional Requirements - -I proposed 4 additional **essential** modules: - -#### 1. 
Administration System - -**Features:** -- Standard user: Only takes training -- Admin user: Manages content + sees reports -- Admin can promote other users to admin -- Access control (general admin vs area admin) - -**Why it's critical:** -System is static without this (content never updates). - ---- - -#### 2. Content CRUD - -**a) Track Registration:** -- Track name -- Description -- Microlearning order -- Active/inactive track (allows unpublishing) - -**b) Microlearning Registration:** -- Title -- Type: Simple text (2 paragraphs) OR Video -- Image upload (if text) -- Video URL (if video) -- Order within track - -**c) Question Registration:** -- Question (text) -- 3 answer options: - - "Great" (green) - - "So-so" (yellow) - - "Poor" (red) -- Points per answer (e.g., 10, 5, 0 points) -- Custom feedback per answer - -**Why it's critical:** -Client needs to update content without calling dev/DBA. - ---- - -#### 3. Data Export - -**Features:** -- Export to Excel (.xlsx) -- Filters: - - By period (start/end date) - - By track - - By employee - - By area/department -- Exported columns: - - Employee name - - ID number - - Completed track - - Total score - - Completion date - - Individual answers (for audit) - -**Why it's critical:** -HR needs to evidence training for: -- Regulatory Norms (NR-13, NR-20 - flammable gases) -- ISO audits -- Labor lawsuits - ---- - -#### 4. User Management - -**Features:** -- Import employees (CSV/Excel upload) -- Manual registration -- Activate/deactivate users -- Assign mandatory tracks by area -- Pending notifications - -**Why it's critical:** -Company has 500+ employees, manual registration is unfeasible. - ---- - -### Step 3: Client Presentation - -**Approach:** - -1. Showed UX wireframes (beautiful interface) -2. Asked: "How will you register the first track?" -3. Client: "Ah... good question. We hadn't thought about that." -4. Presented the 4 additional modules -5. Client: "Makes total sense! Without this we can't use it." 
- -**Result:** -- Proposal approved **with additional modules** -- Adjusted scope (timeline + budget) -- Zero future rework -- Client recognized added value - ---- - -## Implementation - -### My Role in the Project - -**1. Solution Architect** -- Identification of non-functional requirements -- Architecture design (modules, integrations) -- Technology definition - -**2. Tech Lead** -- Technical team leadership (3 devs) -- Code review -- Code standards definition -- Technical risk management - -**3. Technical Product Owner** -- Creation of complete **user stories** -- Backlog prioritization -- Continuous refinement with client - ---- - -### Chosen Tech Stack - -**Backend:** -- `.NET 7` - REST APIs -- `Entity Framework Core` - ORM -- `SQL Server` - Database -- `ClosedXML` - Excel generation - -**Frontend:** -- `React` - Web interface -- `Material-UI` - Components -- `React Player` - Video player -- `Chart.js` - Progress charts - -**Infrastructure:** -- `Azure App Service` - Hosting -- `Azure Blob Storage` - Video/image storage -- `Azure SQL Database` - Managed database - ---- - -### Created User Stories - -I wrote **32 user stories** covering all flows. 
Examples: - -**US-01: Register Track (Admin)** -``` -As system administrator -I want to register a new training track -So that employees can take the courses - -Acceptance criteria: -- Admin accesses "Tracks" menu → "New Track" -- Fills in: Name, Description, Status (Active/Inactive) -- Can add existing microlearnings to track -- Defines microlearning order (drag & drop) -- System validates mandatory fields -- Saves and displays success message -``` - -**US-15: Complete Microlearning (Employee)** -``` -As employee -I want to complete a microlearning from my track -To learn about the topic and earn points - -Acceptance criteria: -- Employee accesses assigned track -- Sees list of microlearnings (uncompleted first) -- Clicks microlearning → opens screen with: - - Text (2 paragraphs) + Image OR - - Embedded video player -- "Continue" button appears after: - - 30s (if text) - - End of video (if video) -- Marks microlearning as seen -- Test question appears automatically -``` - -**US-22: Export Results (Admin)** -``` -As administrator -I want to export training results to Excel -To generate compliance and audit reports - -Acceptance criteria: -- Admin accesses "Reports" → "Export" -- Selects filters (period, track, area) -- Clicks "Generate Excel" -- System processes and downloads .xlsx file -- Excel contains columns: Name, ID, Track, Points, Date, Answers -- Readable format (bold headers, auto-adjusted columns) -``` - ---- - -## Key Features Implemented - -### 1. Gamified Scoring System - -**Mechanics:** -- Each question worth points (configurable) -- "Great" answer: 10 points -- "So-so" answer: 5 points -- "Poor" answer: 0 points - -**Employee dashboard:** -- Total score -- Ranking (optional, configurable) -- Badges for completed tracks -- Visual progress (% bar) - -**Why it works:** -Factory floor employees engage more with gamification elements. - ---- - -### 2. 
Adaptive Microlearning - -**Content types:** - -**Text + Image:** -- 2 paragraphs (max 300 words) -- 1 illustrative image -- Ideal for: Procedures, norms, concepts - -**Video:** -- Short videos (2-5 min) -- Embedded player (YouTube/Vimeo or upload) -- Ideal for: Demonstrations, equipment operations - -**Why microlearning?** -- Employees complete during breaks (10-15min) -- Short content = higher retention -- Facilitates updates (vs long courses) - ---- - -### 3. Delegated Administration System - -**Hierarchy:** - -``` -General Admin (HR) - ↓ can promote -Area Admin (Managers) - ↓ can view only -Employees from their area -``` - -**Permissions:** -- General admin: Creates tracks, promotes admins, sees all data -- Area admin: Sees only their area reports -- Employee: Only takes training - -**Audit:** -- Logs of who created/edited each content -- History of admin promotions -- SOX/ISO compliance - ---- - -### 4. Export for Compliance - -**Generated Excel format:** - -| ID | Name | Area | Track | Completion Date | Points | Status | -|-----------|------|------|--------|----------------|--------|--------| -| 1001 | John Silva | Production | NR-20 Safety | 11/15/2024 | 95/100 | Approved | -| 1002 | Mary Santos | Logistics | Gas Handling | 11/14/2024 | 78/100 | Approved | - -**Additional sheet: Answer Details** -- Allows audit: "Did employee X answer question Y correctly?" 
-- Evidence for labor lawsuits -- NR-13/NR-20 compliance - ---- - -## Results & Impact - -### System in Production - -**Current status:** In use for 4+ months - -**Adoption metrics:** -- 500+ registered employees -- 12 active tracks -- 150+ created microlearnings -- 8,000+ completed training sessions -- 100+ exported reports (compliance) - -**Completion rate:** 87% (industry average: 45%) - ---- - -### Client Impact - -**Before:** -- In-person training (high cost, difficult scheduling) -- Paper evidence (losses, difficult audit) -- Difficulty updating content - -**After:** -- Asynchronous training (employee completes when possible) -- Digital evidence (facilitated compliance) -- HR updates content without calling IT -- 70% reduction in training cost - -**Client feedback:** -> "If we had implemented only what we initially requested, the system would be useless. The pre-analysis saved the project." - ---- - -### Solution Design Value - -**ROI of pre-sale analysis:** - -**Scenario A (without analysis):** -1. Develop interface only (2 months) -2. Client tests and realizes CRUD is missing (1 month later) -3. Rework to add modules (2+ months) -4. **Total: 5+ months + client frustration** - -**Scenario B (with analysis - what we did):** -1. Identify requirements beforehand (1 week) -2. Approve complete scope (1 week) -3. Develop correct solution (4 months) -4. **Total: 4 months + satisfied client** - -**Savings:** 1+ month of rework + opportunity cost - ---- - -## Tech Stack - -`.NET 7` `C#` `Entity Framework Core` `SQL Server` `React` `Material-UI` `Azure App Service` `Azure Blob Storage` `ClosedXML` `Chart.js` `User Stories` `Solution Design` `Tech Lead` - ---- - -## Key Decisions & Trade-offs - -### Why not use ready-made LMS? (Moodle, Canvas) - -**Alternatives considered:** -1. Moodle (open-source, free) -2. Totara/Canvas (corporate LMS) -3. 
**Custom development** - -**Justification:** -- Generic LMS: Unnecessary complexity (forums, wikis, etc) -- Client wants **only microlearning** (simplicity) -- LMS license cost > custom dev cost -- Client AD/SSO integration (easier custom) -- UX optimized for factory floor (mobile-first, touch) - ---- - -### Why 3 answer options (vs 4-5)? - -**Choice:** Green (Great), Yellow (So-so), Red (Poor) - -**Justification:** -- Factory floor employees prefer simplicity -- Universal colors (traffic light) -- Avoids choice paradox (fewer options = more engagement) -- Clearer gamification - ---- - -### Why Excel Export (vs Online Dashboard)? - -**Both were implemented**, but Excel is critical for: - -**Regulatory compliance:** -- Auditors ask for "digitally signed file" -- NR-13/NR-20 require physical evidence -- Labor lawsuits accept Excel - -**Flexibility:** -- HR can do custom analyses in Excel -- Combine with other data sources -- Presentations for board - ---- - -## Lessons Learned - -### 1. Solution Design Prevents Rework - -**Lesson:** 1 week of critical analysis saves months of rework. - -**Application:** -- Always question incomplete specifications -- Think about "the day after" (who manages this in production?) -- Involve client in requirements discussions - ---- - -### 2. UX ≠ Functional Requirements - -**Lesson:** Beautiful wireframes don't replace requirements analysis. - -**UX focuses on:** How user **uses** the system -**Solution Design focuses on:** How system **works** end-to-end - -Both are necessary and complementary. - ---- - -### 3. Asking "How?" is More Important than "What?" - -**Client says:** "I want tracks and microlearnings" -**Solution Designer asks:** "How does the first track enter the system?" - -This simple question revealed 4 missing modules. - ---- - -### 4. 
Well-Written User Stories Accelerate Development - -**Investment:** 2 weeks writing 32 detailed user stories - -**Return:** -- Devs knew exactly what to build -- Zero ambiguity -- Very few bugs (clear requirements) -- Client validated stories before coding - -**Lesson:** Time spent planning reduces development time. - ---- - -### 5. Compliance is Hidden Requirement - -**In regulated industries** (health, energy, chemical), there will always be: -- Audit needs -- Evidence exports -- Logs of who did what - -**Lesson:** Ask about compliance **before**, not after. - ---- - -## Challenges Overcome - -| Challenge | Solution | Result | -|---------|---------|-----------| -| Incomplete specification | Pre-sale critical analysis | Correct scope from start | -| Client without technical knowledge | User stories in business language | Client validated requirements | -| Employees with low digital literacy | Simplified UX (3 buttons, colors) | 87% completion rate | -| NR-13/NR-20 compliance | Excel export with details | Approved in 2 audits | -| Managing 500+ users | CSV import + admin hierarchy | Onboarding in 1 week | - ---- - -## Next Steps (Future Roadmap) - -**Planned features:** - -1. **Push Notifications** - - Remind employee of pending training - - Notify of new mandatory track - -2. **Native Mobile App** - - Offline-first (downloaded videos) - - Employees without computer - -3. **Digital Certificates** - - Digitally signed PDF - - QR code for validation - -4. **Data Intelligence** - - Which microlearnings have most errors? - - Identify knowledge gaps by area - ---- - -**Result:** Functional system in production, satisfied client, zero rework - all because 1 week was invested in **thinking before coding**. - -[Need critical requirements analysis? 
Get in touch](#contact) diff --git a/Content/Cases/en/pharma-digital-transformation.md b/Content/Cases/en/pharma-digital-transformation.md deleted file mode 100644 index 6de611e..0000000 --- a/Content/Cases/en/pharma-digital-transformation.md +++ /dev/null @@ -1,577 +0,0 @@ ---- -title: "Pharma Lab Digital MVP - From Zero to Production" -slug: "pharma-digital-transformation" -summary: "Squad leadership in greenfield project for pharmaceutical lab, building digital platform MVP with complex integrations (Salesforce, Twilio, official APIs) starting from absolute zero - no Git, no servers, no infrastructure." -client: "Pharmaceutical Laboratory" -industry: "Pharmaceutical & Healthcare" -timeline: "4 months (2-month planned delay)" -role: "Tech Lead & Solution Architect" -image: "" -tags: - - MVP - - Digital Transformation - - .NET - - React - - Next.js - - Salesforce - - Twilio - - SQL Server - - Tech Lead - - Greenfield -featured: true -order: 3 -date: 2023-03-01 -seo_title: "Pharma Digital MVP - Digital Transformation from Scratch" -seo_description: "Case study of building digital MVP for pharmaceutical lab from scratch: no Git, no infrastructure, with complex integrations and successful delivery." -seo_keywords: "MVP, digital transformation, pharma, .NET, React, Next.js, Salesforce, greenfield project, tech lead" ---- - -## Overview - -Pharmaceutical laboratory at the **beginning of digital transformation** hires consulting firm to build discount platform for prescribing physicians, starting from WordPress prototype. - -**Unique challenge:** Start greenfield project in company **without basic development infrastructure** - no Git, no provisioned servers, no defined processes. - -**Context:** Project executed in multi-squad environment. **Successful production delivery** despite initial infrastructure challenges, with controlled 2-month delay. - ---- - -## Challenge - -### Digital Transformation... 
Starting from Absolute Zero - -**Company initial state (2023):** - -**No Git/versioning** -- Code only on local machines -- Non-existent history -- Impossible collaboration - -**No provisioned servers** -- Non-existent development environment -- Staging not configured -- Production not prepared - -**No development processes** -- No CI/CD -- No code review -- No structured task management - -**No experienced internal technical team** -- Team unfamiliar with modern stacks -- First contact with React, REST APIs -- Inexperience with complex integrations - -**Technical starting point:** -- Functional prototype in **WordPress** -- Content and texts already approved -- UX/UI defined -- Business rules documented (partially) - ---- - -### Required Complex Integrations - -The MVP needed to integrate with multiple external systems: - -1. **Salesforce** - Discount order registration -2. **Twilio** - SMS for login validation (2FA) -3. **Official physician API** - CRM validation + professional data -4. **Interplayers** - Discount record sending by CPF -5. **WordPress** - Content reading (headless CMS) -6. **SQL Server** - Data persistence - -**Additional complexity:** -- Different credentials/environments per integration -- Varying SLAs (Twilio critical, WordPress tolerant) -- Provider-specific error handling -- LGPD compliance (sensitive physician data) - ---- - -## Solution Architecture - -### Strategy: Start Small, Build Solid - -**Initial decision:** Explain to the team the process we would follow, establishing foundations before coding. - -### Phase 1: Basic Infrastructure Setup (Weeks 1-2) - -Even without provisioned servers, I started essential setup: - -**Git & Versioning:** -```bash -# Structured repository from day 1 -git init -git flow init # Defined branch strategy - -# Monorepo structure -/ -├── frontend/ # Next.js + React -├── backend/ # .NET APIs -├── cms-adapter/ # WordPress integration -└── docs/ # Architecture and ADRs -``` - -**Process explained to team:** -1. 
Everything in Git (atomic commits, descriptive messages) -2. Feature branches (never commit directly to main) -3. Mandatory code review (2 approvals) -4. CI/CD prepared (for when servers are ready) - -**Local environments first:** -- Docker Compose for local development -- External API mocks (until credentials arrive) -- Local SQL Server with data seeds - ---- - -### Phase 2: Modern & Decoupled Architecture - -``` -┌─────────────────────────────────────────────────────┐ -│ FRONTEND (Next.js + React) │ -│ - SSR for SEO │ -│ - Client-side for interactivity │ -│ - API consumption │ -└────────────┬────────────────────────────────────────┘ - │ - ▼ -┌─────────────────────────────────────────────────────┐ -│ BACKEND APIs (.NET 7) │ -│ - REST APIs │ -│ - Authentication/Authorization │ -│ - Business logic │ -│ - Orchestration layer │ -└────┬────┬────┬────┬────┬─────────────────────────┬──┘ - │ │ │ │ │ │ - ▼ ▼ ▼ ▼ ▼ ▼ -┌────────┐ ┌──────┐ ┌──────┐ ┌────────┐ ┌────────┐ ┌──────────┐ -│Salesf. │ │Twilio│ │CRM │ │Interpl.│ │WordPr. │ │SQL Server│ -│ │ │ │ │API │ │ │ │(CMS) │ │ │ -└────────┘ └──────┘ └──────┘ └────────┘ └────────┘ └──────────┘ -``` - -**Chosen stack:** - -**Frontend:** -- `Next.js 13` - SSR, routing, optimizations -- `React 18` - Components, hooks, context -- `TypeScript` - Type safety -- `Tailwind CSS` - Modern styling - -**Backend:** -- `.NET 7` - REST APIs -- `Entity Framework Core` - ORM -- `SQL Server 2019` - Database -- `Polly` - Resilience patterns (retry, circuit breaker) - -**Why Next.js instead of keeping WordPress?** -- Performance (SSR vs monolithic PHP) -- Optimized SEO (critical for pharma) -- Modern experience (SPA when needed) -- Scalability -- WordPress kept only as CMS (headless) - ---- - -### Phase 3: Integrations (Project Core) - -#### 1. 
Salesforce - Campaigns and Order Registration - -**Implemented solution:** - -Salesforce was configured to manage two main functionalities: - -**a) Discount campaigns:** -- Marketing configures campaigns in Salesforce (medication X, discount Y%, period) -- Backend queries active campaigns via API -- Frontend (Next.js) displays available discount percentage based on: medication + active campaign - -**b) Order registration:** -- User informs: physician CRM, state, patient CPF, medication -- System validates data (real CRM via official API, valid CPF) -- Percentage calculated automatically (Salesforce campaigns + CMS rules) -- Order registered in Salesforce with all data (LGPD compliance) - -**Overcome technical challenges:** -- OAuth2 authentication with automatic refresh token -- Rate limiting (Salesforce has API/day limits) -- Retry logic for transient failures (Polly) -- CPF masking for logs (LGPD) - ---- - -#### 2. Twilio - SMS Authentication (2FA) - -**Implemented solution:** - -Two-factor authentication system to ensure security: - -**Login flow:** -1. User enters phone number -2. Backend generates 6-digit code (valid for 5 minutes) -3. SMS sent via Twilio ("Your code: 123456") -4. User enters code in frontend -5. Backend validates code + expiration timestamp -6. JWT token issued after successful validation - -**Compliance and audit:** -- Phone numbers masked in logs (LGPD) -- Complete audit (who, when, which SMS) -- Delivery rate: 99.8% - ---- - -#### 3. 
Official Physician API (Regional Medical Council) - -**Implemented solution:** - -Automatic physician validation via official medical council API: - -**Performed validations:** -- CRM exists and is active in council -- Physician name matches informed CRM -- Specialty is allowed (lab business rule) -- State corresponds to registration state - -**Optimizations:** -- 24-hour cache to reduce official API calls -- Fallback if API is down (notifies admin) -- Automatic retry for transient failures - -**Why this matters:** -Ensures only real and active physicians can prescribe discounts, avoiding fraud. - ---- - -#### 4. WordPress as Headless CMS - -**Implemented solution:** - -Marketing continues managing content in WordPress (familiar), but frontend is modern Next.js. - -**Architecture:** -- WordPress: Manages texts, images, campaign rules -- WordPress REST API: Exposes content via JSON -- Next.js: Consumes API and renders with SSR (SEO optimized) - -**Benefits:** -- Marketing doesn't need to learn new tool -- Modern frontend (performance, UX) -- Optimized SEO (Server-Side Rendering) -- Clear separation of responsibilities (content vs code) - ---- - -### Phase 4: Resilience & Error Handling - -With multiple external integrations, failures are inevitable. The solution was to implement **resilience patterns** using Polly library (.NET): - -**Implemented patterns:** - -**1. Retry** -- If Salesforce/Twilio/CRM API fail, system automatically retries 2-3x -- Wait grows exponentially (1s, 2s, 4s) to avoid overload -- Only transient errors (timeout, 503) are retried - -**2. Circuit Breaker** -- If service fails 5x in a row, "opens circuit" for 30s -- During 30s, doesn't try anymore (avoids wasting resources) -- After 30s, tries again (may have recovered) - -**3. Timeout** -- Each integration has maximum response time -- Avoids indefinitely stuck requests - -**4. 
Fallback (Plan B)** -- Salesforce down: Order goes to queue, processes later -- Twilio down: Alert administrator via email -- CRM API down: Uses cache (24h old data) -- WordPress down: Displays pre-loaded static content - -**Strategies per integration:** - -| Integration | Retry | Circuit Breaker | Timeout | Plan B | -|----------|-------|-----------------|---------|----------| -| Salesforce | 3x (exponential) | 5 failures/30s | 10s | Retry queue | -| Twilio | 2x (linear) | 3 failures/60s | 5s | Admin alert | -| CRM API | 3x (exponential) | No | 15s | Cache | -| WordPress | No | No | 3s | Static content | - -**Production result:** -- Salesforce had maintenance (1h) → System continued working (queue processed later) -- Twilio had instability → Automatic retry resolved 95% of cases -- Zero downtime perceived by users - ---- - -## Overcoming Infrastructure Challenges - -### Problem: Servers Not Provisioned - -**Temporary solution:** -1. 100% local development (Docker Compose) -2. External service mocks (when credentials delayed) -3. CI/CD prepared but not active (awaiting infra) - -**When servers arrived (week 6):** -- Deploy in 2 hours (already prepared) -- Zero surprises (everything tested locally) -- Smooth rollout - ---- - -### Problem: Delayed Integration Credentials - -**Impact:** Twilio and Salesforce took 3 weeks to be provisioned. - -**Solution:** Create "mock" (simulated) versions of each integration: -- Twilio mock: Logs instead of sending real SMS -- Salesforce mock: Saves order to local JSON file -- CRM API mock: Returns fictional physician data - -**How it works:** -- Development environment: Uses mocks (no credentials needed) -- Production environment: Uses real integrations (when credentials arrive) -- Automatic switch based on configuration - -**Result:** Team stayed 100% productive for 3 weeks, testing complete flows without depending on credentials. 
- ---- - -### Problem: Team Inexperienced with Modern Stack - -**Context:** Team had no experience with React, TypeScript, modern .NET Core, REST APIs. - -**Enablement approach:** - -**1. Pair Programming (1h/day per developer)** -- Tech lead works alongside dev -- Screen sharing + real-time explanation -- Dev writes code, tech lead guides - -**2. Educational Code Review** -- Not just "approve" or "reject" -- Comments explain the **why** of each suggestion -- Example: "Always handle request errors! If API crashes, user needs to know what happened." - -**3. Living Documentation** -- ADRs (Architecture Decision Records): Why did we choose X and not Y? -- READMEs: How to run, test, deploy -- Onboarding guide: From zero to first feature - -**4. Weekly Live Coding (2h)** -- Tech lead solves real problem live -- Team observes thinking process -- Q&A at end - -**Result:** -- After 4 weeks, team was autonomous -- Code quality consistently increased -- Devs started doing code review among themselves (peer review) - ---- - -## Results & Impact - -### Successful Delivery Despite Challenges - -**Context:** Program with multiple squads working in parallel. - -**Achieved result:** -- **MVP delivered to production successfully** -- Controlled 2-month delay (significantly less than other program initiatives) -- All integrations working as planned -- Zero critical bugs in production (first week) - -**Why was delivery successful?** - -1. **Anticipated setup** - Git, processes, local Docker from day 1 -2. **Strategic mocks** - Team wasn't blocked waiting for infra -3. **Solid architecture** - Resilience from the start -4. **Continuous upskilling** - Team learned by doing -5. 
**Proactive communication** - Risks reported early - ---- - -### MVP Metrics - -**Performance:** -- Loading time: <2s (95th percentile) -- Lighthouse score: 95+ (mobile) -- SSL A+ rating - -**Integrations:** -- Salesforce: 100% orders synchronized -- Twilio: 99.8% delivery rate -- CRM API: 10k validations/day (average) -- SQL Server: 50k records/month - -**Adoption:** -- 2,000+ registered physicians (first 3 months) -- 15,000+ processed discount orders -- 4.8/5 satisfaction (internal survey) - ---- - -### Client Impact - -**Digital transformation initiated:** -- Git implemented and adopted -- Established development processes -- Internal team enabled in modern stacks -- Cloud infrastructure configured (Azure) -- Evolution roadmap defined - -**Foundation for future projects:** -- Architecture served as reference for other initiatives -- Documented code patterns (coding standards) -- Reused CI/CD pipelines - ---- - -## Tech Stack - -`.NET 7` `C#` `Entity Framework Core` `SQL Server` `React 18` `Next.js 13` `TypeScript` `Tailwind CSS` `Salesforce API` `Twilio` `WordPress REST API` `Docker` `Polly` `OAuth2` `JWT` `LGPD Compliance` - ---- - -## Key Decisions & Trade-offs - -### Why Next.js instead of pure React? - -**Requirements:** -- Critical SEO (pharma needs to rank) -- Performance (physicians use mobile) -- Dynamic content (WordPress) - -**Next.js offers:** -- SSR out-of-the-box -- API routes (BFF pattern) -- Automatic optimizations (image, fonts) -- Simplified deploy (Vercel, Azure) - ---- - -### Why keep WordPress? - -**Alternatives considered:** -1. Migrate content to database + custom CMS (time) -2. Strapi/Contentful (costs + learning curve) -3. **WordPress headless** (best trade-off) - -**Advantages:** -- Marketing team already knows how to use -- Approved content was already there -- WordPress REST API is solid -- Zero cost (already running) - ---- - -### Why .NET 7 instead of Node.js? - -**Context:** Client had preference for Microsoft stack. 
- -**Additional benefits:** -- Superior performance (vs Node in APIs) -- Native type safety (C#) -- Entity Framework (mature ORM) -- Easy Azure integration (future deploy) -- Client team had familiarity - ---- - -## Lessons Learned - -### 1. Infrastructure Delayed? Prepare Alternatives - -Don't wait for servers/credentials to start: -- Local Docker is your friend -- Mocks allow progress -- CI/CD can be prepared before having where to deploy - -**Lesson:** Control what you can control. - ---- - -### 2. Processes > Tools - -Even without corporate Git, I established: -- Branching strategy -- Code review -- Commit conventions -- Documentation standards - -**Result:** When tools arrived, team already knew how to use them. - ---- - -### 3. Upskilling is Investment, Not Cost - -Pair programming and code reviews took time, but: -- Team became autonomous faster -- Code quality increased -- Natural knowledge sharing -- Simplified onboarding of new devs - ---- - -### 4. Resilience from the Start - -Implementing Polly (retry, circuit breaker) at the start saved in production: -- Twilio had instability (resolved automatically) -- Salesforce had maintenance (queue worked) -- CRM API had slowness (cache mitigated) - -**Lesson:** Don't leave resilience for "later". Failures will happen. - ---- - -### 5. Clear Risk Communication - -I reported weekly: -- Blockers (infrastructure, credentials) -- Risks (deadlines, dependencies) -- Alternative solutions (mocks, workarounds) - -**Result:** Stakeholders knew exact status and had no surprises. 
- ---- - -## Challenges & How They Were Overcome - -| Challenge | Impact | Solution | Result | -|---------|---------|---------|-----------| -| No Git | Total blocker | Local setup + GitLab Cloud | Team productive day 1 | -| No servers | No dev environment | Local Docker Compose | Complete local dev/test | -| Delayed credentials | Integration blocked | Mock services | Progress without blocker | -| Inexperienced team | Low quality code | Pair prog + Code review | Ramp-up in 4 weeks | -| Multiple integrations | High complexity | Polly + patterns | Zero prod downtime | - ---- - -## Next Steps (Post-MVP) - -**Roadmap suggested to client:** - -1. **Phase 2: Feature expansion** - - Dashboard for physicians (order history) - - Push notifications (Firebase) - - E-commerce integration (direct purchase) - -2. **Phase 3: Optimizations** - - Distributed cache (Redis) - - CDN for static assets - - Advanced analytics (Amplitude) - -3. **Phase 4: Scale** - - Kubernetes (AKS) - - Microservices (break monolith) - - Event-driven architecture (Azure Service Bus) - ---- - -**Result:** MVP delivered to production despite starting literally from zero, establishing solid foundations for client's digital transformation. - -[Need to build an MVP in a challenging scenario? 
Get in touch](#contact) diff --git a/Content/Cases/en/sap-integration-healthcare.md b/Content/Cases/en/sap-integration-healthcare.md deleted file mode 100644 index 4d38fc5..0000000 --- a/Content/Cases/en/sap-integration-healthcare.md +++ /dev/null @@ -1,211 +0,0 @@ ---- -title: "SAP Healthcare Integration System" -slug: "sap-integration-healthcare" -summary: "Bidirectional integration processing 100k+ transactions/day with 99.9% uptime" -client: "Confidential - Healthcare Multinational" -industry: "Healthcare" -timeline: "6 months" -role: "Integration Architect" -image: "" -tags: - - SAP - - C# - - .NET - - Integrations - - Enterprise - - Healthcare -featured: true -order: 1 -date: 2023-06-15 -seo_title: "Case Study: SAP Healthcare Integration - 100k Transactions/Day" -seo_description: "How we architected SAP integration system processing 100k+ daily transactions with 99.9% uptime for healthcare company." -seo_keywords: "SAP integration, C#, .NET, SAP Connector, enterprise integration, healthcare" ---- - -## Overview - -**Client:** Healthcare Multinational (confidential) -**Size:** 100,000+ employees -**Project:** Benefits integration -**Timeline:** 6 months -**My Role:** Integration Architect - ---- - -## Challenge - -Client had internal benefits management system that needed to sync with SAP ECC to process payroll. 
- -### Main pain points: -- Manual process prone to errors -- 3-5 day delay between systems -- 100k employees waiting for processing -- Load spikes (month-end) - -### Constraints: -- Limited budget (no SAP BTP) -- Small internal SAP team (2 developers) -- Tight deadline (6-month go-live) -- Legacy .NET Framework 4.5 system - ---- - -## Solution - -Bidirectional integration architecture: - -``` -[Internal System] ←→ [Queue] ←→ [SAP Connector] ←→ [SAP ECC] - ↓ ↓ - [MongoDB Logs] [ABAP Z_BENEFITS] -``` - -### Components: -- .NET Service with SAP Connector (NCo 3.0) -- Custom ABAP transaction (Z_BENEFITS) -- Queue system (RabbitMQ) for retry logic -- MongoDB for audit and troubleshooting -- Scheduler (Hangfire) for batch processing - -### Flow: -1. System generates changes (new hires, modifications) -2. Service processes batch (500 records/batch) -3. SAP Connector calls Z_BENEFITS via RFC -4. SAP returns status (success/error) -5. Automatic retry if failure (max 3x) -6. MongoDB logs for troubleshooting - ---- - -## Results - -### Metrics: -- **100k+** transactions/day processed -- **99.9%** uptime -- Reduced **5 days → 4 hours** (delay) -- **80%** reduction in processing time -- **Zero** manual errors (vs 2-3% before) - -### Benefits: -- Employees receive benefits on-time -- HR team saves 40h/month (manual work) -- Complete audit (compliance) -- Scalable (30% year-over-year growth without refactor) - ---- - -## Tech Stack - -`C#` `.NET Framework 4.5` `SAP NCo 3.0` `RabbitMQ` `MongoDB` `Hangfire` `Docker` `SAP ECC` `ABAP` `RFC` - ---- - -## Decisions & Motivation - -### Decision 1: SAP Connector vs SAP BTP - -**Options evaluated:** -- SAP BTP (events, modern APIs, cloud) -- SAP Connector (direct RFC, on-premise) - -**We chose:** SAP Connector - -**Motivation:** -- Client had on-premise SAP ECC (not S/4) -- Budget didn't allow BTP license -- SAP team comfortable with ABAP/RFC -- Needs met with RFC (didn't need real-time event-driven) - -**Accepted trade-off:** -- Less 
"modern" than BTP, but 100% functional -- $0 additional cost vs $30k+/year BTP -- 2 months faster delivery (no BTP learning curve) - ---- - -### Decision 2: Queue System vs Direct Calls - -**Options evaluated:** -- Direct synchronous calls (simpler) -- Queue with retry (more complex) - -**We chose:** Queue + Retry - -**Motivation:** -- SAP occasionally unavailable (maintenance) -- Load spikes (month-end = 200k requests) -- Ensure zero data loss -- Resilience > simplicity (critical environment) - -**Implementation:** -- RabbitMQ with dead-letter queue -- Exponential retry (1min, 5min, 15min) -- Alerts if 3 consecutive failures - -**Result:** -- Zero data loss in 2 years production -- HR team doesn't need to "keep watch" - ---- - -### Decision 3: Custom ABAP vs Standard - -**Options evaluated:** -- Standard SAP BAPIs (zero ABAP code) -- Custom transaction (Z_BENEFITS) - -**We chose:** Custom transaction - -**Motivation:** -- Standard BAPIs didn't have business-specific validations -- Client wanted logic centralized in SAP (single source of truth) -- Allowed complex validations (eligibility, dependents, limits) - -**Trade-off:** -- Requires ABAP maintenance (internal SAP team) -- But: Client preferred vs duplicate logic (risk of desync) - ---- - -### Alternatives NOT Chosen - -**Webhook/Callback (Event-Driven):** -- Client had no infrastructure to expose APIs -- Internal system behind firewall -- Batch polling works well for the case - -**Kubernetes Microservices:** -- Overkill for single integration -- Team had no K8s expertise -- Simple Docker sufficient - -**Real-time Sync (<1min):** -- Business doesn't need (daily batch ok) -- Infrastructure cost would increase 3x -- 4h delay acceptable for payroll - ---- - -## Learnings - -### What worked very well: -- Involve SAP team from day 1 (buy-in) -- MongoDB for logs (10x faster troubleshooting) -- Retry logic saved countless times - -### What I would do differently: -- Add health check endpoint earlier -- Monitoring 
dashboard from start (added later) - -### Lessons for next projects: -- Client "limited budget" ≠ "limited solution" - creativity solves -- Document ALL architectural decisions (team turnover) -- Simplicity beats complexity when both work (KISS) - ---- - -## Need Something Similar? - -Complex SAP integrations, legacy systems, or high-availability architecture? - -[Let's talk about your challenge →](/#contact) diff --git a/Content/Cases/es/asp-to-dotnet-migration.md b/Content/Cases/es/asp-to-dotnet-migration.md deleted file mode 100644 index 3f219ae..0000000 --- a/Content/Cases/es/asp-to-dotnet-migration.md +++ /dev/null @@ -1,329 +0,0 @@ ---- -title: "Migración ASP 3.0 a .NET Core - Sistema de Rastreo de Cargas" -slug: "asp-to-dotnet-migration" -summary: "Tech Lead en la migración gradual de sistema crítico ASP 3.0 a .NET Core, con sincronización de datos entre versiones y reducción de costos de $20k/año en APIs de mapeo." -client: "Empresa de Logística y Rastreo" -industry: "Logística & Seguridad" -timeline: "12 meses (migración completa)" -role: "Tech Lead & Solution Architect" -image: "" -tags: - - ASP Classic - - .NET Core - - SQL Server - - Migration - - Tech Lead - - OSRM - - APIs - - Arquitectura -featured: true -order: 2 -date: 2015-06-01 -seo_title: "Migración ASP 3.0 a .NET Core - Case Carneiro Tech" -seo_description: "Caso de migración gradual de aplicación ASP 3.0 a .NET Core con sincronización de datos y reducción de $20k/año en costos de APIs." -seo_keywords: "ASP migration, .NET Core, legacy modernization, SQL Server, OSRM, tech lead, routing API" ---- - -## Descripción General - -Sistema crítico de monitoreo de cargas de alto valor (TVs LED de $600 cada una, cargamentos de hasta 1000 unidades) utilizando rastreo GPS vía satélite. La aplicación cubría todo el ciclo: desde registro y evaluación de conductores (verificación de antecedentes policiales) hasta monitoreo en tiempo real y entrega final. 
- -**Desafío principal:** Migrar aplicación legacy ASP 3.0 a .NET Core sin downtime, manteniendo operación crítica 24/7. - ---- - -## Desafío - -### Sistema Legacy Crítico - -La empresa operaba un sistema mission-critical en **ASP 3.0** (Classic ASP) que no podía detenerse: - -**Tecnología legacy:** -- ASP 3.0 (tecnología de 1998) -- SQL Server 2005 -- Cluster failover on-premises (perfectamente capaz de soportar la carga) -- Integración con rastreadores GPS vía satélite -- Google Maps API (costo: **$20,000/año** solo para cálculo de rutas) - -**Restricciones:** -- Sistema operando 24/7 con cargas de alto valor -- Imposibilidad de downtime durante migración -- Múltiples módulos interdependientes -- Equipo necesitaba continuar desarrollando features durante la migración - ---- - -## Arquitectura de Solución - -### Fase 1: Preparación de Infraestructura (Meses 1-3) - -#### Upgrade de Base de Datos -``` -SQL Server 2005 → SQL Server 2014 -- Backup completo y validación -- Migración de stored procedures -- Optimización de índices -- Pruebas de performance -``` - -#### Estrategia de Sincronización Dual-Write - -Implementé un **sistema de sincronización bidireccional** que permitía: - -1. **Módulos nuevos (.NET Core)** escribían en la base de datos nueva -2. **Trigger automático** sincronizaba datos hacia la base de datos legacy -3. **Módulos antiguos (ASP 3.0)** continuaban funcionando normalmente -4. 
**Zero downtime** durante toda la migración - -```csharp -// Ejemplo de sincronización implementada -public class DualWriteService -{ - public async Task SaveDriver(Driver driver) - { - // Escribe en base de datos nueva (.NET Core) - await _newDbContext.Drivers.AddAsync(driver); - await _newDbContext.SaveChangesAsync(); - - // Trigger SQL sincroniza automáticamente hacia base de datos legacy - // Módulos ASP 3.0 continúan funcionando - } -} -``` - -**¿Por qué este enfoque?** -- Permitió migración **módulo por módulo** -- Equipo podía continuar desarrollando -- Rollback sencillo si fuera necesario -- Reducción de riesgo operacional - ---- - -### Fase 2: Migración Gradual de Módulos (Meses 4-12) - -Migré los módulos en orden de complejidad creciente: - -**Orden de migración:** -1. ✅ Registros básicos (conductores, vehículos) -2. ✅ Evaluación de riesgo (integración con base policial) -3. ✅ Gestión de cargas y rutas -4. ✅ Monitoreo GPS en tiempo real -5. ✅ Alertas y notificaciones -6. ✅ Reportes y analytics - -**Stack de la aplicación migrada:** -- `.NET Core 1.0` (2015-2016 era el inicio de .NET Core) -- `Entity Framework Core` -- `SignalR` para monitoreo real-time -- `SQL Server 2014` -- APIs RESTful - ---- - -### Fase 3: Reducción de Costos con OSRM (Ahorro de $20k/año) - -#### Problema: Costo Prohibitivo de Google Maps - -La empresa gastaba **$20,000/año** solo en Google Maps Directions API para cálculo de rutas de camiones. - -#### Solución: OSRM (Open Source Routing Machine) - -Implementé una solución basada en **OSRM** (motor de ruteo open-source): - -**Arquitectura de la solución:** - -``` -┌─────────────────┐ -│ Frontend │ -│ (Leaflet.js) │ -└────────┬────────┘ - │ - ▼ -┌─────────────────┐ ┌──────────────┐ -│ API Wrapper │─────▶│ OSRM Server │ -│ (.NET Core) │ │ (self-hosted)│ -└────────┬────────┘ └──────────────┘ - │ - ▼ -┌─────────────────┐ -│ Google Maps │ -│ (display only) │ -└─────────────────┘ -``` - -**Implementación:** - -1. 
**Servidor OSRM configurado** en servidor propio -2. **API wrapper amigable** en .NET Core que: - - Recibía origen/destino - - Consultaba OSRM (gratuito) - - Devolvía todos los puntos de la ruta - - Formateaba para el frontend -3. **Frontend** dibujaba la ruta en Google Maps (solo visualización, sin API de rutas) - -```csharp -[HttpGet("route")] -public async Task GetRoute(double originLat, double originLng, - double destLat, double destLng) -{ - // Consulta OSRM (gratuito) - var osrmResponse = await _osrmClient.GetRouteAsync( - originLat, originLng, destLat, destLng); - - // Retorna puntos formateados para el frontend - return Ok(new { - points = osrmResponse.Routes[0].Geometry.Coordinates, - distance = osrmResponse.Routes[0].Distance, - duration = osrmResponse.Routes[0].Duration - }); -} -``` - -**Frontend con Leaflet:** - -```javascript -// Dibuja ruta en el mapa (Google Maps solo para tiles) -L.polyline(routePoints, {color: 'red'}).addTo(map); -``` - -#### Intento con OpenStreetMap - -Intenté sustituir también Google Maps (tiles) por **OpenStreetMap**, que funcionó técnicamente, pero: - -❌ **A los usuarios no les gustó** la apariencia -❌ Preferían la interfaz familiar de Google Maps - -✅ **Decisión:** Mantener Google Maps solo para visualización (sin costo de API de rutas) - -**Resultado:** Ahorro de **~$20,000/año** manteniendo calidad de las rutas. 
- ---- - -## Resultados e Impacto - -### Migración Completa en 12 Meses - -✅ **100% de los módulos** migrados de ASP 3.0 a .NET Core -✅ **Zero downtime** durante toda la migración -✅ **Equipo productivo** durante todo el proceso -✅ Sistema más rápido y escalable - -### Reducción de Costos - -💰 **$20,000/año ahorrados** con sustitución de Google Maps Directions API -📉 **Infraestructura optimizada** con SQL Server 2014 - -### Mejoras Técnicas - -🚀 **Performance:** Aplicación .NET Core 3x más rápida que ASP 3.0 -🔒 **Seguridad:** Stack moderno con parches de seguridad activos -🛠️ **Mantenibilidad:** Código C# moderno vs VBScript legacy -📊 **Monitoreo:** SignalR para tracking real-time más eficiente - ---- - -## Fase No Ejecutada: Microservicios & Cloud - -### Planificación Inicial - -Participé en el **diseño y concepción** de la segunda fase (nunca ejecutada): - -**Arquitectura planificada:** -- Migración a **Azure** (cloud estaba apenas comenzando en 2015) -- División en **microservicios**: - - Servicio de autenticación - - Servicio de GPS/tracking - - Servicio de rutas - - Servicio de notificaciones -- **Event-driven architecture** con message queues - -**Por qué no fue ejecutada:** - -Salí de la empresa inmediatamente después de concluir la migración a .NET Core. La segunda fase quedó planificada pero no fue implementada por mí. - ---- - -## Tech Stack - -`ASP 3.0` `VBScript` `.NET Core 1.0` `C#` `Entity Framework Core` `SQL Server 2005` `SQL Server 2014` `OSRM` `Leaflet.js` `Google Maps` `SignalR` `REST APIs` `GPS/Satellite` `Migration Strategy` `Dual-Write Pattern` - ---- - -## Decisiones Clave & Trade-offs - -### ¿Por qué sincronización dual-write? - -**Alternativas consideradas:** -1. ❌ Big Bang migration (demasiado arriesgado) -2. ❌ Mantener todo en ASP 3.0 (insostenible) -3. 
✅ **Migración gradual con sync** (elegido) - -**Justificación:** -- Sistema crítico no podía detenerse -- Permitió rollback módulo por módulo -- Equipo continuó productivo - -### ¿Por qué OSRM en vez de otros? - -**Alternativas:** -- Google Maps: $20k/año ❌ -- Mapbox: Licencia paga ❌ -- GraphHopper: Configuración compleja ❌ -- **OSRM: Open-source, rápido, configurable** ✅ - -### ¿Por qué no OpenStreetMap para tiles? - -**Decisión basada en UX:** -- Técnicamente funcionó perfectamente -- Usuarios preferían interfaz familiar de Google -- **Compromiso:** Google Maps para visualización (gratis) + OSRM para rutas (gratis) - ---- - -## Lecciones Aprendidas - -### 1. Migración Gradual > Big Bang - -Migrar módulo por módulo con sincronización permitió: -- Aprendizaje continuo -- Ajustes de ruta durante el proceso -- Confianza del equipo y stakeholders - -### 2. Open Source Puede Ahorrar Mucho - -OSRM ahorró **$20k/año** sin pérdida de calidad. Pero requiere: -- Expertise para configurar -- Infraestructura propia -- Mantenimiento continuo - -### 3. UX > Tecnología A Veces - -OpenStreetMap era técnicamente superior (gratuito), pero usuarios prefirieron Google Maps. **Lección:** Escuchar a los usuarios finales. - -### 4. Planifique Cloud, pero Valide el ROI - -En 2015, cloud estaba comenzando. La infraestructura on-premises (cluster SQL Server) era perfectamente capaz. **No fuerce cloud si no hay beneficio claro.** - ---- - -## Contexto: Por qué 2015 fue un Momento Especial - -**Estado de la tecnología en 2015:** - -- ☁️ **Cloud en pañales:** AWS existía, Azure creciendo, pero adopción corporativa aún baja -- 🆕 **.NET Core 1.0 lanzado** en junio/2016 (usamos RC durante proyecto) -- 📱 **Microservicios:** Concepto nuevo, Docker en adopción inicial -- 🗺️ **Google Maps dominante:** APIs pagas, pocas alternativas open-source maduras - -**Desafíos de la época:** -- Herramientas de migración ASP→.NET inexistentes -- Documentación .NET Core escasa (versión 1.0!) 
-- Patrones de arquitectura aún consolidándose - -Este proyecto fue **pionero** al adoptar .NET Core al inicio, cuando la mayoría migraba a .NET Framework 4.x. - ---- - -**Resultado:** Migración exitosa de sistema crítico 24/7, ahorro de $20k/año, y base sólida para evolución futura. - -[¿Quiere discutir una migración similar? Póngase en contacto](#contact) diff --git a/Content/Cases/es/cnpj-fast-process.md b/Content/Cases/es/cnpj-fast-process.md deleted file mode 100644 index f793301..0000000 --- a/Content/Cases/es/cnpj-fast-process.md +++ /dev/null @@ -1,382 +0,0 @@ ---- -title: "CNPJ Fast - Proceso de Migración a CNPJ Alfanumérico" -slug: "cnpj-fast-process" -summary: "Creación de metodología estructurada para migración de aplicaciones al nuevo formato de CNPJ alfanumérico brasileño, vendida a aseguradora y empresa de cobranza." -client: "Empresa de Consultoría (Interno)" -industry: "Consultoría & Transformación Digital" -timeline: "3 meses (creación del proceso)" -role: "Solution Architect & Process Designer" -image: "" -tags: - - Process Design - - CNPJ - - Migration Strategy - - Regulatory Compliance - - Consulting - - Sales Enablement -featured: true -order: 3 -date: 2024-09-01 -seo_title: "CNPJ Fast - Metodología de Migración CNPJ Alfanumérico" -seo_description: "Caso de creación de proceso estructurado para migración a CNPJ alfanumérico brasileño, vendido a aseguradora y empresa de cobranza." -seo_keywords: "CNPJ alfanumérico, migration process, regulatory compliance, consulting, methodology" ---- - -## Descripción General - -Con la introducción del **CNPJ alfanumérico** por la Receita Federal brasileña, las empresas enfrentaban el desafío de adaptar sus aplicaciones legacy que almacenaban CNPJ como campos numéricos (`bigint`, `numeric`, `int`). - -Creé **CNPJ Fast**, una metodología estructurada para evaluar, planificar y ejecutar migraciones de CNPJ en aplicaciones y bases de datos corporativas. 
- -**Resultado:** Proceso vendido a **2 clientes** (aseguradora y empresa de cobranza) antes incluso de la implementación. - ---- - -## Desafío - -### Cambio Regulatorio Complejo - -**Contexto regulatorio:** -- Receita Federal brasileña introdujo **CNPJ alfanumérico** -- CNPJ deja de ser solo números (14 dígitos) -- Pasa a aceptar **letras y números** (formato alfanumérico) - -**Impacto en las empresas:** - -```sql --- ANTES: CNPJ numérico -CNPJ BIGINT -- 12345678000190 - --- DESPUÉS: CNPJ alfanumérico -CNPJ VARCHAR(18) -- 12.ABC.678/0001-90 -``` - -**Problemas identificados:** - -1. 🗄️ **Base de datos:** Columnas `BIGINT`, `NUMERIC`, `INT` no soportan caracteres -2. 🔑 **Claves primarias:** CNPJ usado como PK en varias tablas -3. 🔗 **Foreign keys:** Relaciones entre tablas -4. 📊 **Volumen:** Millones de registros para migrar -5. 💻 **Aplicaciones:** Validaciones, máscaras, reglas de negocio -6. 🧪 **Pruebas:** Garantizar integridad después de migración -7. ⏱️ **Downtime:** Ventanas de mantenimiento limitadas - -**Sin un proceso estructurado**, empresas arriesgaban: -- Pérdida de datos -- Inconsistencias en la base de datos -- Aplicaciones rotas -- Downtime prolongado - ---- - -## Solución: CNPJ Fast Process - -### Metodología en 5 Fases - -Diseñé un proceso estructurado que podría ser replicado en diferentes clientes: - -``` -┌─────────────────────────────────────────────┐ -│ FASE 1: DISCOVERY & ASSESSMENT │ -│ - Inventario de aplicaciones │ -│ - Análisis de schemas de base de datos │ -│ - Identificación de tablas impactadas │ -│ - Estimación de volumen de datos │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ FASE 2: IMPACT ANALYSIS │ -│ - Mapeo de dependencias │ -│ - Análisis de claves primarias/foráneas │ -│ - Identificación de reglas de negocio │ -│ - Evaluación de riesgo │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ FASE 3: MIGRATION PLANNING │ 
-│ - Estrategia de migración (phased commits) │ -│ - Scripts SQL automatizados │ -│ - Plan de rollback │ -│ - Ventanas de mantenimiento │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ FASE 4: EXECUTION │ -│ - Migración de datos en lotes │ -│ - Actualización de aplicaciones │ -│ - Pruebas de integración │ -│ - Validación de integridad │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ FASE 5: VALIDATION & GO-LIVE │ -│ - Pruebas de regresión │ -│ - Validación de performance │ -│ - Go-live coordinado │ -│ - Monitoreo post-migración │ -└─────────────────────────────────────────────┘ -``` - ---- - -### Fase 1: Discovery & Assessment - -**Objetivo:** Entender el alcance completo de la migración - -**Entregables:** - -1. **Inventario de Aplicaciones** - - Lista de aplicaciones que usan CNPJ - - Tecnologías (ASP 3.0, VB6, .NET, microservicios) - - Criticidad de cada aplicación - -2. **Análisis de Schema** - ```sql - -- Script de descubrimiento automático - SELECT - t.TABLE_SCHEMA, - t.TABLE_NAME, - c.COLUMN_NAME, - c.DATA_TYPE, - c.CHARACTER_MAXIMUM_LENGTH - FROM INFORMATION_SCHEMA.TABLES t - JOIN INFORMATION_SCHEMA.COLUMNS c - ON t.TABLE_NAME = c.TABLE_NAME - WHERE c.COLUMN_NAME LIKE '%CNPJ%' - AND c.DATA_TYPE IN ('bigint', 'numeric', 'int') - ORDER BY t.TABLE_SCHEMA, t.TABLE_NAME; - ``` - -3. 
**Estimación de Volumen** - - Total de registros por tabla - - Tamaño en GB - - Tiempo estimado de migración - -**Ejemplo de output:** - -| Tabla | Columna | Tipo Actual | Registros | Criticidad | -|--------|--------|------------|-----------|-------------| -| Clientes | CNPJ_Cliente | BIGINT | 8.000.000 | Alta | -| Proveedores | CNPJ_Proveedor | NUMERIC(14) | 2.500.000 | Media | -| Transacciones | CNPJ_Pagador | BIGINT | 90.000.000 | Crítica | - ---- - -### Fase 2: Impact Analysis - -**Objetivo:** Mapear todas las dependencias y riesgos - -**Análisis de claves:** - -```sql --- Identifica PKs y FKs que involucran CNPJ -SELECT - fk.name AS FK_Name, - tp.name AS Parent_Table, - cp.name AS Parent_Column, - tr.name AS Referenced_Table, - cr.name AS Referenced_Column -FROM sys.foreign_keys fk -INNER JOIN sys.tables tp ON fk.parent_object_id = tp.object_id -INNER JOIN sys.foreign_key_columns fkc ON fk.object_id = fkc.constraint_object_id -INNER JOIN sys.columns cp ON fkc.parent_column_id = cp.column_id - AND fkc.parent_object_id = cp.object_id -INNER JOIN sys.tables tr ON fk.referenced_object_id = tr.object_id -INNER JOIN sys.columns cr ON fkc.referenced_column_id = cr.column_id - AND fkc.referenced_object_id = cr.object_id -WHERE cp.name LIKE '%CNPJ%' OR cr.name LIKE '%CNPJ%'; -``` - -**Evaluación de Riesgo:** - -- 🔴 **Alto:** Tablas con CNPJ como PK y >10M registros -- 🟡 **Medio:** Tablas con FK hacia CNPJ -- 🟢 **Bajo:** Tablas sin constraints - ---- - -### Fase 3: Migration Planning - -**Estrategia de migración gradual:** - -Para evitar bloqueo de base de datos, diseñé estrategia de **phased commits**: - -```sql --- Estrategia para tablas grandes (>1M registros) - --- 1. Agregar nueva columna VARCHAR -ALTER TABLE Clientes -ADD CNPJ_Cliente_New VARCHAR(18) NULL; - --- 2. 
Migración en lotes (commits faseados) -DECLARE @BatchSize INT = 100000; -DECLARE @RowsAffected INT = 1; - -WHILE @RowsAffected > 0 -BEGIN - UPDATE TOP (@BatchSize) Clientes - SET CNPJ_Cliente_New = FORMAT(CNPJ_Cliente, '00000000000000') - WHERE CNPJ_Cliente_New IS NULL; - - SET @RowsAffected = @@ROWCOUNT; - WAITFOR DELAY '00:00:01'; -- Pausa entre lotes -END; - --- 3. Remover constraints (PKs, FKs) -ALTER TABLE Clientes DROP CONSTRAINT PK_Clientes; - --- 4. Renombrar columnas -EXEC sp_rename 'Clientes.CNPJ_Cliente', 'CNPJ_Cliente_Old', 'COLUMN'; -EXEC sp_rename 'Clientes.CNPJ_Cliente_New', 'CNPJ_Cliente', 'COLUMN'; - --- 5. Recrear constraints -ALTER TABLE Clientes -ADD CONSTRAINT PK_Clientes PRIMARY KEY (CNPJ_Cliente); - --- 6. Remover columna antigua (tras validación) -ALTER TABLE Clientes DROP COLUMN CNPJ_Cliente_Old; -``` - -**¿Por qué este enfoque?** - -- ✅ Evita lock de tabla entera -- ✅ Permite pausar/reanudar migración -- ✅ Minimiza impacto en producción -- ✅ Facilita rollback si es necesario - ---- - -### Fase 4 & 5: Execution y Validation - -**Checklist de ejecución:** - -- [ ] Backup completo de la base de datos -- [ ] Ejecutar scripts de migración en lotes -- [ ] Actualizar aplicaciones (validaciones, máscaras) -- [ ] Pruebas de integración -- [ ] Validación de integridad referencial -- [ ] Pruebas de performance -- [ ] Go-live coordinado -- [ ] Monitoreo 24h post-migración - ---- - -## Sales Enablement: Presentación UX - -**Colaboración con Gestor de UX:** - -El gestor de UX de la empresa creó una **presentación visual impactante** del proceso CNPJ Fast: - -**Contenido de la presentación:** -- 📊 Infografías del proceso de 5 fases -- 📈 Ejemplos de estimaciones de tiempo/costo -- 🎯 Casos de uso (aseguradoras, bancos, fintechs) -- ✅ Checklist ejecutivo -- 📋 Templates de documentación - -**Resultado:** Presentación utilizada por el equipo comercial para prospección. 
- ---- - -## Resultados e Impacto - -### Ventas Realizadas - -**Cliente 1: Aseguradora** -- Stack: ASP 3.0, VB6 components, .NET, microservicios -- Alcance: Migración completa de aplicaciones legacy -- Estado: **Proyecto vendido** (ejecución por otro equipo) -- Valor: [Confidencial] - -**Cliente 2: Empresa de Cobranza** -- Alcance: Migración de base de datos (~100M registros) -- Estado: **Proyecto vendido y en ejecución** (por mí) -- Particularidad: Proceso fue **reestructurado** para atender necesidades específicas -- Ver caso completo: [Migración CNPJ - 100M Registros](/cases/cnpj-migration-database) - ---- - -### Impacto en el Negocio - -💰 **2 proyectos vendidos** antes incluso de la primera ejecución -📈 **Proceso replicable** para nuevos clientes -🎯 **Posicionamiento** como especialista en migraciones regulatorias -📚 **Base de conocimiento** para futuros proyectos similares - ---- - -### Impacto Técnico - -🔧 **Metodología probada** en escenarios reales -📖 **Documentación reutilizable** (scripts, checklists, templates) -🚀 **Aceleración** de proyectos similares (de semanas a días) - ---- - -## Tech Stack - -`SQL Server` `Migration Strategy` `Process Design` `Regulatory Compliance` `ASP 3.0` `VB6` `.NET` `Microservices` `Batch Processing` `Database Optimization` - ---- - -## Decisiones Clave & Trade-offs - -### ¿Por qué proceso estructurado? - -**Alternativas:** -1. ❌ Enfoque ad-hoc por proyecto -2. ❌ Consultoría manual sin metodología -3. ✅ **Proceso replicable y escalable** - -**Justificación:** -- Reduce tiempo de Discovery -- Estandariza entregas -- Facilita ventas (presentación lista) -- Permite ejecución por diferentes equipos - -### ¿Por qué separar en 5 fases? - -**Beneficios:** -- Cliente puede aprobar fase a fase -- Permite ajustes durante el proceso -- Facilita gestión de riesgos -- Entregas incrementales - ---- - -## Lecciones Aprendidas - -### 1. 
UX/Presentación Importa para Ventas - -La presentación visual hecha por el gestor de UX fue **crucial** para cerrar los 2 contratos. Proceso técnico bueno + presentación mala = sin ventas. - -### 2. Proceso Vende, No Solo Ejecución - -Crear una **metodología documentada** tiene más valor comercial que solo ofrecer "horas de consultoría". - -### 3. Cada Cliente es Único - -El cliente solicitó **reestructuración del proceso**. Un buen proceso debe ser: -- Estructurado lo suficiente para ser replicable -- Flexible lo suficiente para personalizar - -### 4. Colaboración Multidisciplinaria - -Trabajar con gestor de UX (presentaciones) + equipo comercial (ventas) + técnico (ejecución) = éxito. - ---- - -## Próximos Pasos - -**Oportunidades futuras:** - -1. 🌎 **Expansión:** Ofrecer CNPJ Fast para más sectores (bancos, fintechs, retail) -2. 📦 **Producto:** Transformar en herramienta automatizada (SaaS) -3. 📚 **Capacitación:** Capacitar equipos internos de clientes -4. 🔄 **Evolución:** Adaptar proceso para otras migraciones regulatorias (PIX, Open Banking) - ---- - -**Resultado:** Metodología estructurada que se convirtió en producto vendible, generando ingresos antes incluso de la primera ejecución técnica. - -[¿Quiere implementar CNPJ Fast en su empresa? Póngase en contacto](#contact) diff --git a/Content/Cases/es/cnpj-migration-database.md b/Content/Cases/es/cnpj-migration-database.md deleted file mode 100644 index 974879d..0000000 --- a/Content/Cases/es/cnpj-migration-database.md +++ /dev/null @@ -1,469 +0,0 @@ ---- -title: "Migración CNPJ Alfanumérico - 100 Millones de Registros" -slug: "cnpj-migration-database" -summary: "Ejecución de migración masiva de CNPJ numérico a alfanumérico en base de datos con ~100M registros, usando estrategia de commits faseados para evitar bloqueo." 
-client: "Empresa de Cobranza" -industry: "Cobranza & Servicios Financieros" -timeline: "En ejecución" -role: "Database Architect & Tech Lead" -image: "" -tags: - - SQL Server - - Database Migration - - CNPJ - - Performance Optimization - - Batch Processing - - Big Data -featured: true -order: 4 -date: 2024-11-01 -seo_title: "Migración CNPJ Alfanumérico - 100M Registros | Carneiro Tech" -seo_description: "Caso de migración masiva de CNPJ en base de datos con 100 millones de registros usando commits faseados y optimizaciones de performance." -seo_keywords: "database migration, SQL Server, CNPJ, batch processing, performance optimization, phased commits" ---- - -## Descripción General - -Una empresa de cobranza que trabaja con bases de datos de información transitoria (sin software propietario) necesita adaptar sus sistemas al nuevo formato de **CNPJ alfanumérico** brasileño. - -**Desafío principal:** Migrar ~**100 millones de registros** en tablas con columnas `BIGINT` y `NUMERIC` a `VARCHAR`, sin bloquear la base de datos en producción. - -**Estado:** Proyecto en ejecución (preparación de scripts de migración). - ---- - -## Desafío - -### Volumen Masivo de Datos - -**Contexto de la empresa:** -- Empresa de cobranza (no desarrolla software propio) -- Trabaja con **datos transitorios** (alta rotación) -- Base de datos SQL Server con volumen crítico - -**Análisis inicial reveló:** - -| Tabla | Columna | Tipo Actual | Registros | Tamaño | -|--------|--------|------------|-----------|---------| -| Deudores | CNPJ_Deudor | BIGINT | 8.000.000 | 60 GB | -| Transacciones | CNPJ_Pagador | NUMERIC(14) | 90.000.000 | 1.2 TB | -| Empresas | CNPJ_Empresa | BIGINT | 2.500.000 | 18 GB | -| **TOTAL** | - | - | **~100.000.000** | **~1.3 TB** | - -**Problemas identificados:** - -1. 🔴 **Tablas con 8M+ líneas** usando `BIGINT` para CNPJ -2. 🔴 **90 millones de registros** en tabla de transacciones -3. 🔑 **CNPJ como clave primaria** en algunas tablas -4. 
🔗 **Foreign keys** relacionando múltiples tablas -5. ⚠️ **Imposibilidad de downtime prolongado** (operación 24/7) -6. 💾 **Restricciones de espacio** en disco (necesita estrategia eficiente) - ---- - -## Decisión Estratégica: Phased Commits - -### ¿Por qué NO hacer ALTER COLUMN directo? - -**Enfoque ingenuo (NO funciona):** - -```sql --- ❌ NUNCA HAGA ESTO EN TABLAS GRANDES -ALTER TABLE Transacciones -ALTER COLUMN CNPJ_Pagador VARCHAR(18); -``` - -**Problemas:** -- 🔒 Bloquea la tabla entera durante la conversión -- ⏱️ Puede tomar horas/días en tablas grandes -- 💥 Bloquea todas las operaciones (INSERT, UPDATE, SELECT) -- 🚨 Riesgo de timeout o falla en medio de la operación -- 🔙 Rollback complejo si algo sale mal - ---- - -### Estrategia Elegida: Column Swap con Commits Faseados - -**Basado en experiencia anterior**, decidí usar enfoque gradual: - -``` -┌─────────────────────────────────────────────┐ -│ 1. Crear nueva columna VARCHAR al FINAL │ -│ (operación rápida, no bloquea tabla) │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ 2. UPDATE en lotes (commits faseados) │ -│ - 100k registros a la vez │ -│ - Pausa entre lotes (evita contención) │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ 3. Remover PKs y FKs │ -│ (tras 100% migrado) │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ 4. Renombrar columnas (swap) │ -│ - CNPJ → CNPJ_Old │ -│ - CNPJ_New → CNPJ │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ 5. Recrear PKs/FKs con nueva columna │ -└─────────────────────────────────────────────┘ - ▼ -┌─────────────────────────────────────────────┐ -│ 6. 
Validación y eliminación columna vieja │ -└─────────────────────────────────────────────┘ -``` - -**¿Por qué este enfoque?** - -✅ **Sin lock de tabla completa** (operación incremental) -✅ **Puede pausar/reanudar** en cualquier momento -✅ **Monitoreo de progreso** en tiempo real -✅ **Rollback simple** (basta eliminar nueva columna) -✅ **Minimiza impacto en producción** (commits pequeños) - -**Decisión tomada basada en:** -- 📚 Experiencia anterior con migraciones de gran volumen -- 🔍 Conocimiento de locks de SQL Server -- 🎯 Necesidad de zero downtime - -**Nota:** Esta decisión fue tomada **sin consultar IA** - basada puramente en experiencia práctica de proyectos anteriores. - ---- - -## Detalles de Implementación - -### Fase 1: Crear Nueva Columna - -```sql --- Operación rápida (metadata change solamente) -ALTER TABLE Transacciones -ADD CNPJ_Pagador_New VARCHAR(18) NULL; - --- Agrega índice temporal para acelerar lookups -CREATE NONCLUSTERED INDEX IX_Temp_CNPJ_New -ON Transacciones(CNPJ_Pagador_New) -WHERE CNPJ_Pagador_New IS NULL; -``` - -**Tiempo estimado:** ~1 segundo (independiente del tamaño de la tabla) - ---- - -### Fase 2: Migración en Lotes (Core Strategy) - -```sql --- Script de migración con commits faseados -DECLARE @BatchSize INT = 100000; -- 100k registros por lote -DECLARE @RowsAffected INT = 1; -DECLARE @TotalProcessed INT = 0; -DECLARE @StartTime DATETIME = GETDATE(); - -WHILE @RowsAffected > 0 -BEGIN - BEGIN TRANSACTION; - - -- Actualiza lote de 100k registros aún no migrados - UPDATE TOP (@BatchSize) Transacciones - SET CNPJ_Pagador_New = RIGHT('00000000000000' + CAST(CNPJ_Pagador AS VARCHAR), 14) - WHERE CNPJ_Pagador_New IS NULL; - - SET @RowsAffected = @@ROWCOUNT; - SET @TotalProcessed = @TotalProcessed + @RowsAffected; - - COMMIT TRANSACTION; - - -- Log de progreso - PRINT 'Processed: ' + CAST(@TotalProcessed AS VARCHAR) + ' rows. 
Batch: ' + CAST(@RowsAffected AS VARCHAR); - PRINT 'Elapsed time: ' + CAST(DATEDIFF(SECOND, @StartTime, GETDATE()) AS VARCHAR) + ' seconds'; - - -- Pausa entre lotes (reduce contención) - WAITFOR DELAY '00:00:01'; -- 1 segundo entre lotes -END; - -PRINT 'Migration completed! Total rows: ' + CAST(@TotalProcessed AS VARCHAR); -``` - -**Parámetros configurables:** - -- `@BatchSize`: 100k (balanceado entre performance y lock time) - - Muy pequeño = muchas transacciones, overhead - - Muy grande = lock prolongado, impacto en prod -- `WAITFOR DELAY`: 1 segundo (da tiempo a otras queries para ejecutar) - -**Estimaciones de tiempo:** - -| Registros | Batch Size | Tiempo Estimado | -|-----------|------------|----------------| -| 8.000.000 | 100.000 | ~2-3 horas | -| 90.000.000 | 100.000 | ~20-24 horas | - -**Ventajas:** -- ✅ No bloquea aplicación -- ✅ Otras queries pueden ejecutar entre los lotes -- ✅ Puede pausar (Ctrl+C) y reanudar después (WHERE NULL toma desde donde paró) -- ✅ Log de progreso en tiempo real - ---- - -### Fase 3: Remoción de Constraints - -```sql --- Identifica todas las PKs y FKs que involucran la columna -SELECT name -FROM sys.key_constraints -WHERE type = 'PK' - AND parent_object_id = OBJECT_ID('Transacciones') - AND COL_NAME(parent_object_id, parent_column_id) = 'CNPJ_Pagador'; - --- Remueve PKs -ALTER TABLE Transacciones -DROP CONSTRAINT PK_Transacciones_CNPJ; - --- Remueve FKs (tablas que referencian) -ALTER TABLE Pagos -DROP CONSTRAINT FK_Pagos_Transacciones; -``` - -**Tiempo estimado:** ~10 minutos (depende de cuántas constraints existen) - ---- - -### Fase 4: Column Swap (Renombramiento) - -```sql --- Renombra columna antigua a _Old -EXEC sp_rename 'Transacciones.CNPJ_Pagador', 'CNPJ_Pagador_Old', 'COLUMN'; - --- Renombra nueva columna al nombre original -EXEC sp_rename 'Transacciones.CNPJ_Pagador_New', 'CNPJ_Pagador', 'COLUMN'; - --- Altera a NOT NULL (tras validación de 100% completado) -ALTER TABLE Transacciones -ALTER COLUMN CNPJ_Pagador 
VARCHAR(18) NOT NULL; -``` - -**Tiempo estimado:** ~1 segundo (metadata change) - ---- - -### Fase 5: Recreación de Constraints - -```sql --- Recrea PK con nueva columna VARCHAR -ALTER TABLE Transacciones -ADD CONSTRAINT PK_Transacciones_CNPJ -PRIMARY KEY CLUSTERED (CNPJ_Pagador); - --- Recrea FKs -ALTER TABLE Pagos -ADD CONSTRAINT FK_Pagos_Transacciones -FOREIGN KEY (CNPJ_Pagador) REFERENCES Transacciones(CNPJ_Pagador); -``` - -**Tiempo estimado:** ~30-60 minutos (depende del volumen) - ---- - -### Fase 6: Validación y Limpieza - -```sql --- Valida que 100% fue migrado -SELECT COUNT(*) -FROM Transacciones -WHERE CNPJ_Pagador IS NULL OR CNPJ_Pagador = ''; - --- Valida integridad referencial -DBCC CHECKCONSTRAINTS WITH ALL_CONSTRAINTS; - --- Si todo OK, remueve columna antigua -ALTER TABLE Transacciones -DROP COLUMN CNPJ_Pagador_Old; - --- Remueve índice temporal -DROP INDEX IX_Temp_CNPJ_New ON Transacciones; -``` - ---- - -## Personalización del Proceso CNPJ Fast - -### Diferencias vs. Proceso Original - -El proceso **CNPJ Fast** original fue **reestructurado** para este cliente: - -**Cambios principales:** - -| Aspecto | CNPJ Fast Original | Cliente (Personalizado) | -|---------|-------------------|---------------------| -| **Foco** | Aplicaciones + DB | Solo DB (sin software propio) | -| **Discovery** | Inventario de apps | Solo análisis de schema | -| **Ejecución** | Múltiples aplicaciones | Scripts SQL masivos | -| **Batch Size** | 50k-100k | 100k (optimizado para volumen) | -| **Monitoreo** | Manual + herramientas | Logs SQL en tiempo real | -| **Rollback** | Proceso complejo | Simple (DROP COLUMN) | - -**Motivo de la reestructuración:** -- Cliente no tiene aplicaciones propias (solo consume datos) -- Foco 100% en optimización de base de datos -- Volumen mucho mayor que casos típicos (100M vs ~10M) - ---- - -## Tech Stack - -`SQL Server` `T-SQL` `Batch Processing` `Performance Tuning` `Database Optimization` `Migration Scripts` `Phased Commits` `Index 
Optimization` `Constraint Management` - ---- - -## Decisiones Clave & Trade-offs - -### ¿Por qué 100k por batch? - -**Pruebas de performance:** - -| Batch Size | Tiempo/Batch | Lock Duration | Contención | -|------------|-------------|---------------|-----------| -| 10.000 | 2s | Bajo | ✅ Mínimo | -| 50.000 | 8s | Medio | ✅ Aceptable | -| **100.000** | 15s | **Medio** | **✅ Balanceado** | -| 500.000 | 90s | Alto | ❌ Impacto en prod | -| 1.000.000 | 180s | Muy alto | ❌ Inaceptable | - -**Elección:** 100k ofrece mejor balance entre performance e impacto. - ---- - -### ¿Por qué crear columna al FINAL? - -**Internals de SQL Server:** -- Agregar columna al final = metadata change (rápido) -- Agregar en medio = reescritura de páginas (lento) -- Para tablas grandes, posición importa - ---- - -### ¿Por qué WAITFOR DELAY de 1 segundo? - -**Sin delay:** -- ❌ Batch processing consume 100% del I/O -- ❌ Queries de aplicación se vuelven lentas -- ❌ Lock escalation puede ocurrir - -**Con delay de 1s:** -- ✅ Otras queries tienen ventana para ejecutar -- ✅ I/O distribuido -- ✅ Experiencia del usuario preservada - -**Trade-off:** Migración toma +1s por batch (~25% más lenta), pero sistema permanece responsivo. - ---- - -## Estado Actual & Próximos Pasos - -### Estado Actual (Diciembre 2024) - -📝 **Fase de Preparación:** -- ✅ Discovery completo (100M registros identificados) -- ✅ Scripts de migración desarrollados -- ✅ Pruebas en ambiente de homologación -- 🔄 Validación de performance -- ⏳ Esperando ventana de mantenimiento para producción - -### Próximos Pasos - -1. **Backup completo** de producción -2. **Ejecución en producción** (ambiente 24/7) -3. **Monitoreo en tiempo real** durante migración -4. **Validación post-migración** (integridad, performance) -5. **Documentación de lessons learned** - ---- - -## Lecciones Aprendidas (Hasta Ahora) - -### 1. 
Experiencia Anterior Vale Oro - -Decisión de usar phased commits vino de **experiencia práctica** en proyectos anteriores, no de documentación o IA. - -**Situaciones similares anteriores:** -- Migración de datos en e-commerce (50M registros) -- Conversión de encoding (UTF-8 en 100M+ rows) -- Particionamiento de tablas históricas - ---- - -### 2. "Measure Twice, Cut Once" - -Antes de ejecutar en producción: -- ✅ Pruebas exhaustivas en homologación -- ✅ Scripts validados y revisados -- ✅ Rollback probado -- ✅ Estimaciones de tiempo confirmadas - -**Tiempo de preparación:** 3 semanas -**Tiempo de ejecución:** Estimado en 48 horas - -**Ratio:** 10:1 (preparación vs ejecución) - ---- - -### 3. Personalización > One-Size-Fits-All - -El proceso CNPJ Fast original necesitó ser **reestructurado** para este cliente. - -**Lección:** Los procesos deben ser: -- Estructurados lo suficiente para repetir -- Flexibles lo suficiente para adaptar - ---- - -### 4. Monitoreo es Crucial - -Scripts con **log detallado** de progreso permiten: -- Estimar tiempo restante -- Identificar cuellos de botella -- Pausar/reanudar con confianza -- Reportar estado a stakeholders - -```sql --- Log example -Processed: 10.000.000 rows. Batch: 100.000 -Elapsed time: 3600 seconds (10% complete, ~9h remaining) -``` - ---- - -## Optimizaciones de Performance - -### Optimizaciones Implementadas - -1. **Índice temporal WHERE NULL** - - Acelera lookup de registros no migrados - - Removido tras conclusión - -2. **Batch size optimizado** - - Balanceado entre performance y lock time - -3. **Transaction log management** - ```sql - -- Verificar crecimiento del log - DBCC SQLPERF(LOGSPACE); - - -- Ajustar recovery model (si permitido) - ALTER DATABASE MyDatabase SET RECOVERY SIMPLE; - ``` - -4. 
**Ejecución en horario de menor carga** - - Ventana de mantenimiento nocturna - - Fin de semana (si es posible) - ---- - -**Resultado esperado:** Migración de 100 millones de registros en ~48 horas, sin downtime significativo y con posibilidad de rollback rápido. - -[¿Necesita migrar volúmenes masivos de datos? Póngase en contacto](#contact) diff --git a/Content/Cases/es/industrial-learning-platform.md b/Content/Cases/es/industrial-learning-platform.md deleted file mode 100644 index d00ec2e..0000000 --- a/Content/Cases/es/industrial-learning-platform.md +++ /dev/null @@ -1,588 +0,0 @@ ---- -title: "Plataforma de Capacitación Industrial - De Wireframes a Sistema Completo" -slug: "industrial-learning-platform" -summary: "Solution Design para plataforma de microlearning en empresa de gases industriales. Identificación de requisitos críticos no mapeados (admin, registros, exportación) antes de la presentación al cliente, evitando retrabajo y garantizando usabilidad real." -client: "Empresa de Gases Industriales" -industry: "Industrial & Manufactura" -timeline: "4 meses" -role: "Solution Architect & Tech Lead" -image: "" -tags: - - Solution Design - - EdTech - - Learning Platform - - Requirements Analysis - - Tech Lead - - User Stories - - .NET - - Product Design -featured: true -order: 5 -date: 2024-06-01 -seo_title: "Plataforma de Capacitación Industrial - Solution Design" -seo_description: "Caso de Solution Design para plataforma de microlearning, identificando requisitos críticos antes de la presentación al cliente y liderando desarrollo hasta producción." -seo_keywords: "solution design, learning platform, microlearning, requirements analysis, tech lead, industrial training" ---- - -## Descripción General - -Empresa de gases industriales solicita plataforma para capacitar empleados usando metodología de **microlearning** (contenidos cortos y objetivos). 
- -**Requisito inicial:** "Queremos solo la estructura - ruta de aprendizaje, microlearning, pregunta de test y puntuación." - -**Problema:** Especificación incompleta que resultaría en sistema **imposible de usar** (sin forma de registrar contenido, sin administradores, sin exportar resultados). - -**Solución:** Análisis crítico de requisitos **antes de la presentación al cliente**, identificando gaps funcionales y proponiendo solución completa. - ---- - -## Desafío - -### Wireframes Bonitos, Funcionalidad Incompleta - -**Situación inicial:** - -UX creó wireframes hermosos mostrando: -- ✅ Rutas de aprendizaje -- ✅ Microlearnings (video/texto + imagen) -- ✅ Preguntas de test (opción múltiple) -- ✅ Puntuación por empleado - -**Problema identificado:** - -Nadie (cliente, UX, comercial) pensó en: - -❌ **¿Cómo entra contenido en el sistema?** -- ¿Quién registra rutas? -- ¿Quién crea microlearnings? -- ¿Quién escribe preguntas? -- ¿Interfaz manual o import? - -❌ **¿Quién gestiona el sistema?** -- ¿Existe concepto de admin? -- ¿RRHH puede crear admins? -- ¿Gestor de área puede ver solo su equipo? - -❌ **¿Cómo salen datos del sistema?** -- RRHH necesita reportes -- Compliance necesita evidencias -- ¿Cómo exportar datos? -- ¿Formato: Excel? PDF? API? - -**Riesgo real:** - -Si desarrolláramos exactamente lo que fue pedido: -- Sistema funcionaría técnicamente ✅ -- **Pero sería completamente inutilizable** ❌ -- Cliente tendría que pagar refacción para incluir CRUD básico -- Retrabajo + costo adicional + frustración - ---- - -## Proceso de Solution Design - -### Etapa 1: Análisis Crítico (Antes de la Presentación) - -**Acción tomada:** Convoqué reunión con UX **antes** de presentar al cliente. - -**Puntos levantados:** - -**"¿Cómo entra el primer contenido al sistema?"** -- UX: "Ah... no pensamos en eso. ¿Ustedes van a poblar la base de datos?" -- Yo: "¿Y cuando cliente quiera agregar nueva ruta? ¿Vamos a alterar BD en producción?" 
- -**"¿Quién es el dueño del sistema?"** -- UX: "RRHH, imagino." -- Yo: "¿Solo una persona? ¿Y si sale de la empresa? ¿Cómo delega?" - -**"¿RRHH pidió reportes?"** -- UX: "No fue mencionado en el briefing." -- Yo: "RRHH siempre necesita reportes. Es para compliance (NR, ISO)." - ---- - -### Etapa 2: Requisitos Funcionales Identificados - -Propuse 4 módulos adicionales **esenciales**: - -#### 1. Sistema de Administración - -**Funcionalidades:** -- Usuario estándar: Solo realiza capacitaciones -- Usuario admin: Gestiona contenido + ve reportes -- Admin puede promover otros usuarios a admin -- Control de acceso (admin general vs admin de área) - -**Por qué es crítico:** -Sin esto, sistema es estático (contenido nunca se actualiza). - ---- - -#### 2. CRUD de Contenido - -**a) Registro de Rutas:** -- Nombre de la ruta -- Descripción -- Orden de los microlearnings -- Ruta activa/inactiva (permite despublicar) - -**b) Registro de Microlearnings:** -- Título -- Tipo: Texto simple (2 párrafos) O Video -- Upload de imagen (si texto) -- URL de video (si video) -- Orden dentro de la ruta - -**c) Registro de Preguntas:** -- Pregunta (texto) -- 3 opciones de respuesta: - - "Excelente" (verde) - - "Regular" (amarillo) - - "Malo" (rojo) -- Puntuación por respuesta (ej: 10, 5, 0 puntos) -- Feedback personalizado por respuesta - -**Por qué es crítico:** -Cliente necesita actualizar contenido sin llamar a dev/DBA. - ---- - -#### 3. Exportación de Datos - -**Funcionalidades:** -- Exportar a Excel (.xlsx) -- Filtros: - - Por período (fecha inicio/fin) - - Por ruta - - Por empleado - - Por área/departamento -- Columnas exportadas: - - Nombre del empleado - - Matrícula - - Ruta completada - - Puntuación total - - Fecha de conclusión - - Respuestas individuales (para auditoría) - -**Por qué es crítico:** -RRHH necesita evidenciar capacitación para: -- Normas Reglamentarias (NR-13, NR-20 - gases inflamables) -- Auditorías ISO -- Procesos laborales - ---- - -#### 4. 
Gestión de Usuarios - -**Funcionalidades:** -- Importar empleados (upload CSV/Excel) -- Registro manual -- Activar/desactivar usuarios -- Asignar rutas obligatorias por área -- Notificaciones de pendientes - -**Por qué es crítico:** -Empresa tiene 500+ empleados, registro manual es inviable. - ---- - -### Etapa 3: Presentación al Cliente - -**Abordaje:** - -1. Mostré wireframes del UX (interfaz bonita) -2. Pregunté: "¿Cómo van a registrar la primera ruta?" -3. Cliente: "Ah... buena pregunta. No habíamos pensado en eso." -4. Presenté los 4 módulos adicionales -5. Cliente: "Tiene total sentido! Sin esto no podemos usar." - -**Resultado:** -- Propuesta aprobada **con módulos adicionales** -- Alcance ajustado (timeline + presupuesto) -- Zero retrabajo futuro -- Cliente reconoció valor agregado - ---- - -## Implementación - -### Mi Rol en el Proyecto - -**1. Solution Architect** -- Identificación de requisitos no funcionales -- Diseño de arquitectura (módulos, integraciones) -- Definición de tecnologías - -**2. Tech Lead** -- Liderazgo técnico del equipo (3 devs) -- Code review -- Definición de estándares de código -- Gestión de riesgos técnicos - -**3. Product Owner Técnico** -- Creación de **user stories** completas -- Priorización de backlog -- Refinamiento continuo con cliente - ---- - -### Stack Técnico Elegido - -**Backend:** -- `.NET 7` - APIs REST -- `Entity Framework Core` - ORM -- `SQL Server` - Base de datos -- `ClosedXML` - Generación de Excel - -**Frontend:** -- `React` - Interfaz web -- `Material-UI` - Componentes -- `React Player` - Player de video -- `Chart.js` - Gráficos de progreso - -**Infraestructura:** -- `Azure App Service` - Hospedaje -- `Azure Blob Storage` - Almacenamiento de videos/imágenes -- `Azure SQL Database` - Base de datos gestionada - ---- - -### User Stories Creadas - -Escribí **32 user stories** cubriendo todos los flujos. 
Ejemplos: - -**US-01: Registrar Ruta (Admin)** -``` -Como administrador del sistema -Quiero registrar una nueva ruta de capacitación -Para que empleados puedan realizar los cursos - -Criterios de aceptación: -- Admin accede menú "Rutas" → "Nueva Ruta" -- Completa: Nombre, Descripción, Estado (Activa/Inactiva) -- Puede agregar microlearnings existentes a la ruta -- Define orden de los microlearnings (drag & drop) -- Sistema valida campos obligatorios -- Guarda y muestra mensaje de éxito -``` - -**US-15: Realizar Microlearning (Empleado)** -``` -Como empleado -Quiero realizar un microlearning de mi ruta -Para aprender sobre el tema y ganar puntos - -Criterios de aceptación: -- Empleado accede ruta asignada -- Ve lista de microlearnings (no completados primero) -- Hace clic en microlearning → abre pantalla con: - - Texto (2 párrafos) + Imagen O - - Player de video embebido -- Botón "Continuar" aparece después de: - - 30s (si texto) - - Final del video (si video) -- Marca microlearning como visto -- Pregunta de test aparece automáticamente -``` - -**US-22: Exportar Resultados (Admin)** -``` -Como administrador -Quiero exportar resultados de capacitación a Excel -Para generar reportes de compliance y auditorías - -Criterios de aceptación: -- Admin accede "Reportes" → "Exportar" -- Selecciona filtros (período, ruta, área) -- Hace clic "Generar Excel" -- Sistema procesa y descarga archivo .xlsx -- Excel contiene columnas: Nombre, Matrícula, Ruta, Puntos, Fecha, Respuestas -- Formato legible (headers en negrita, columnas autoajustadas) -``` - ---- - -## Características Clave Implementadas - -### 1. 
Sistema de Puntuación Gamificado - -**Mecánica:** -- Cada pregunta vale puntos (configurable) -- Respuesta "Excelente": 10 puntos -- Respuesta "Regular": 5 puntos -- Respuesta "Malo": 0 puntos - -**Dashboard del empleado:** -- Puntuación total -- Ranking (opcional, configurable) -- Badges por rutas completadas -- Progreso visual (barra de %) - -**Por qué funciona:** -Empleados de planta se enganchan más con elementos de gamificación. - ---- - -### 2. Microlearning Adaptativo - -**Tipos de contenido:** - -**Texto + Imagen:** -- 2 párrafos (máx 300 palabras) -- 1 imagen ilustrativa -- Ideal para: Procedimientos, normas, conceptos - -**Video:** -- Videos cortos (2-5 min) -- Player embebido (YouTube/Vimeo o upload) -- Ideal para: Demostraciones, operaciones de equipo - -**¿Por qué microlearning?** -- Empleados realizan en el intervalo (10-15min) -- Contenido corto = mayor retención -- Facilita actualización (vs cursos largos) - ---- - -### 3. Sistema de Administración Delegada - -**Jerarquía:** - -``` -Admin General (RRHH) - ↓ puede promover -Admin de Área (Gerentes) - ↓ puede visualizar solo -Empleados de su área -``` - -**Permisos:** -- Admin general: Crea rutas, promueve admins, ve todos los datos -- Admin de área: Ve solo reportes de su área -- Empleado: Solo realiza capacitaciones - -**Auditoría:** -- Logs de quién creó/editó cada contenido -- Histórico de promociones a admin -- Compliance SOX/ISO - ---- - -### 4. Exportación para Compliance - -**Formato del Excel generado:** - -| Matrícula | Nombre | Área | Ruta | Fecha Conclusión | Puntos | Estado | -|-----------|------|------|--------|----------------|--------|--------| -| 1001 | João Silva | Producción | Seguridad NR-20 | 15/11/2024 | 95/100 | ✅ Aprobado | -| 1002 | María Santos | Logística | Manejo Gases | 14/11/2024 | 78/100 | ✅ Aprobado | - -**Pestaña adicional: Detalle de Respuestas** -- Permite auditoría: "¿Empleado X acertó pregunta Y?" 
-- Evidencia para procesos laborales -- Compliance NR-13/NR-20 - ---- - -## Resultados e Impacto - -### Sistema en Producción - -**Estado actual:** En uso hace 4+ meses - -**Métricas de adopción:** -- 👥 500+ empleados registrados -- 📚 12 rutas activas -- 📖 150+ microlearnings creados -- ✅ 8.000+ capacitaciones completadas -- 📊 100+ reportes exportados (compliance) - -**Tasa de conclusión:** 87% (media industria: 45%) - ---- - -### Impacto en el Cliente - -**Antes:** -- Capacitaciones presenciales (costo alto, agenda difícil) -- Evidencias en papel (pérdidas, difícil auditoría) -- Dificultad en actualizar contenido - -**Después:** -- Capacitación asíncrona (empleado realiza cuando puede) -- Evidencias digitales (compliance facilitado) -- RRHH actualiza contenido sin llamar a TI -- Reducción del 70% en costo de capacitación - -**Feedback del cliente:** -> "Si hubiéramos implementado solo lo que pedimos inicialmente, el sistema sería inútil. El análisis previo salvó el proyecto." - ---- - -### Valor del Solution Design - -**ROI del análisis preventa:** - -**Escenario A (sin análisis):** -1. Desarrollar solo interfaz (2 meses) -2. Cliente prueba y percibe que falta CRUD (1 mes después) -3. Refacción para agregar módulos (2+ meses) -4. **Total: 5+ meses + frustración del cliente** - -**Escenario B (con análisis - lo que hicimos):** -1. Identificar requisitos antes (1 semana) -2. Aprobar alcance completo (1 semana) -3. Desarrollar solución correcta (4 meses) -4. **Total: 4 meses + cliente satisfecho** - -**Economía:** 1+ mes de retrabajo + costo de oportunidad - ---- - -## Tech Stack - -`.NET 7` `C#` `Entity Framework Core` `SQL Server` `React` `Material-UI` `Azure App Service` `Azure Blob Storage` `ClosedXML` `Chart.js` `User Stories` `Solution Design` `Tech Lead` - ---- - -## Decisiones Clave & Trade-offs - -### ¿Por qué no usar LMS listo? (Moodle, Canvas) - -**Alternativas consideradas:** -1. ❌ Moodle (open-source, gratuito) -2. ❌ Totara/Canvas (LMS corporativo) -3. 
✅ **Desarrollo custom** - -**Justificación:** -- LMS genérico: Complejidad innecesaria (foros, wikis, etc) -- Cliente quiere **solo microlearning** (simplicidad) -- Costo de licencia LMS > costo de dev custom -- Integración con AD/SSO del cliente (más fácil custom) -- UX optimizada para planta (mobile-first, touch) - ---- - -### ¿Por qué 3 opciones de respuesta (vs 4-5)? - -**Elección:** Verde (Excelente), Amarillo (Regular), Rojo (Malo) - -**Justificación:** -- Empleados de planta prefieren simplicidad -- Colores universales (semáforo) -- Evita paradoja de la elección (menos opciones = más engagement) -- Gamificación más clara - ---- - -### ¿Por qué Export Excel (vs Dashboard online)? - -**Ambos fueron implementados**, pero Excel es crítico para: - -**Compliance regulatorio:** -- Auditores piden "archivo firmado digitalmente" -- NR-13/NR-20 exigen evidencia física -- Procesos laborales aceptan Excel - -**Flexibilidad:** -- RRHH puede hacer análisis personalizados en Excel -- Combinar con otras fuentes de datos -- Presentaciones para dirección - ---- - -## Lecciones Aprendidas - -### 1. Solution Design Previene Retrabajo - -**Lección:** 1 semana de análisis crítico economiza meses de refacción. - -**Aplicación:** -- Siempre cuestionar especificaciones incompletas -- Pensar en el "día siguiente" (¿quién gestiona esto en producción?) -- Involucrar cliente en discusiones de requisitos - ---- - -### 2. UX ≠ Requisitos Funcionales - -**Lección:** Wireframes bonitos no sustituyen análisis de requisitos. - -**UX se enfoca en:** Cómo usuario **usa** el sistema -**Solution Design se enfoca en:** Cómo sistema **funciona** end-to-end - -Ambos son necesarios y complementarios. - ---- - -### 3. Preguntar "¿Cómo?" es Más Importante que "¿Qué?" - -**Cliente dice:** "Quiero rutas y microlearnings" -**Solution Designer pregunta:** "¿Cómo entra la primera ruta al sistema?" - -Esta pregunta simple reveló 4 módulos faltantes. - ---- - -### 4. 
User Stories Bien Escritas Aceleran Desarrollo - -**Inversión:** 2 semanas escribiendo 32 user stories detalladas - -**Retorno:** -- Devs sabían exactamente qué construir -- Zero ambigüedad -- Muy pocos bugs (requisitos claros) -- Cliente validó historias antes de codificar - -**Lección:** Tiempo gastado en planificación reduce tiempo de desarrollo. - ---- - -### 5. Compliance es Requisito Oculto - -**En industrias reguladas** (salud, energía, químico), siempre habrá: -- Necesidad de auditoría -- Exportación de evidencias -- Logs de quién hizo qué - -**Lección:** Preguntar sobre compliance **antes**, no después. - ---- - -## Desafíos Superados - -| Desafío | Solución | Resultado | -|---------|---------|-----------| -| Especificación incompleta | Análisis crítico preventa | Alcance correcto desde inicio | -| Cliente sin conocimiento técnico | User stories en lenguaje de negocio | Cliente validó requisitos | -| Empleados con baja familiaridad digital | UX simplificado (3 botones, colores) | 87% tasa de conclusión | -| Compliance NR-13/NR-20 | Export Excel con detalle | Aprobado en 2 auditorías | -| Gestión de 500+ usuarios | Import CSV + jerarquía de admins | Onboarding en 1 semana | - ---- - -## Próximos Pasos (Roadmap Futuro) - -**Funcionalidades planificadas:** - -1. **Notificaciones Push** - - Recordar empleado de capacitación pendiente - - Avisar de nueva ruta obligatoria - -2. **App Mobile Nativo** - - Offline-first (videos descargados) - - Empleados sin computadora - -3. **Certificados Digitales** - - PDF firmado digitalmente - - QR code para validación - -4. **Inteligencia de Datos** - - ¿Qué microlearnings tienen más error? - - Identificar gaps de conocimiento por área - ---- - -**Resultado:** Sistema funcional en producción, cliente satisfecho, zero retrabajo - todo porque 1 semana fue invertida en **pensar antes de codificar**. - -[¿Necesita análisis crítico de requisitos? 
Póngase en contacto](#contact) diff --git a/Content/Cases/es/pharma-digital-transformation.md b/Content/Cases/es/pharma-digital-transformation.md deleted file mode 100644 index 3dee40b..0000000 --- a/Content/Cases/es/pharma-digital-transformation.md +++ /dev/null @@ -1,577 +0,0 @@ ---- -title: "MVP Digital para Laboratorio Farmacéutico - De Cero a Producción" -slug: "pharma-digital-transformation" -summary: "Liderazgo de squad en proyecto greenfield para laboratorio farmacéutico, construyendo MVP de plataforma digital con integraciones complejas (Salesforce, Twilio, APIs oficiales) partiendo de cero absoluto - sin Git, sin servidores, sin infraestructura." -client: "Laboratorio Farmacéutico" -industry: "Farmacéutica & Salud" -timeline: "4 meses (2 meses de retraso planificado)" -role: "Tech Lead & Solution Architect" -image: "" -tags: - - MVP - - Digital Transformation - - .NET - - React - - Next.js - - Salesforce - - Twilio - - SQL Server - - Tech Lead - - Greenfield -featured: true -order: 3 -date: 2023-03-01 -seo_title: "MVP Digital Farmacéutico - Transformación Digital de Cero" -seo_description: "Caso de construcción de MVP digital para laboratorio farmacéutico partiendo de cero: sin Git, sin infraestructura, con integraciones complejas y entrega exitosa." -seo_keywords: "MVP, digital transformation, pharma, .NET, React, Next.js, Salesforce, greenfield project, tech lead" ---- - -## Descripción General - -Laboratorio farmacéutico en el **inicio de transformación digital** contrata consultoría para construir plataforma de descuentos para médicos prescriptores, partiendo de prototipo en WordPress. - -**Desafío único:** Comenzar proyecto greenfield en empresa **sin infraestructura básica** de desarrollo - sin Git, sin servidores aprovisionados, sin procesos definidos. - -**Contexto:** Proyecto ejecutado en ambiente de múltiples squads. **Entrega exitosa en producción** a pesar de los desafíos iniciales de infraestructura, con retraso controlado de 2 meses. 
- ---- - -## Desafío - -### Transformación Digital... Partiendo de Cero Absoluto - -**Estado inicial de la empresa (2023):** - -❌ **Sin Git/versionamiento** -- Código solo en máquinas locales -- Histórico inexistente -- Colaboración imposible - -❌ **Sin servidores aprovisionados** -- Ambiente de desarrollo inexistente -- Homologación no configurada -- Producción no preparada - -❌ **Sin procesos de desarrollo** -- Sin CI/CD -- Sin code review -- Sin gestión de tareas estructurada - -❌ **Sin equipo técnico interno experimentado** -- Equipo sin familiaridad con stacks modernos -- Primer contacto con React, APIs REST -- Inexperiencia con integraciones complejas - -**Punto de partida técnico:** -- Prototipo funcional en **WordPress** -- Contenido y textos ya aprobados -- UX/UI definido -- Reglas de negocio documentadas (parcialmente) - ---- - -### Integraciones Complejas Requeridas - -El MVP necesitaba integrar con múltiples sistemas externos: - -1. 🔐 **Salesforce** - Registro de pedidos de descuento -2. 📱 **Twilio** - SMS para validación de login (2FA) -3. 🏥 **API oficial de médicos** - Validación de CRM + datos profesionales -4. 🎯 **Interplayers** - Envío de registros de descuento por CPF -5. 📄 **WordPress** - Lectura de contenido (CMS headless) -6. 💾 **SQL Server** - Persistencia de datos - -**Complejidad adicional:** -- Diferentes credenciales/ambientes por integración -- SLAs variados (Twilio crítico, WordPress tolerante) -- Tratamiento de errores específico por provider -- Compliance LGPD (datos sensibles de médicos) - ---- - -## Arquitectura de Solución - -### Estrategia: Start Small, Build Solid - -**Decisión inicial:** Explicar al equipo el proceso que seguiríamos, estableciendo fundaciones antes de codificar. 
- -### Fase 1: Setup de Infraestructura Básica (Semanas 1-2) - -Incluso sin servidores aprovisionados, inicié setup esencial: - -**Git & Versionamiento:** -```bash -# Repositorio estructurado desde día 1 -git init -git flow init # Branch strategy definida - -# Estructura de monorepo -/ -├── frontend/ # Next.js + React -├── backend/ # .NET APIs -├── cms-adapter/ # WordPress integration -└── docs/ # Arquitectura y ADRs -``` - -**Proceso explicado al equipo:** -1. ✅ Todo en Git (commits atómicos, mensajes descriptivos) -2. ✅ Feature branches (nunca commit directo en main) -3. ✅ Code review obligatorio (2 aprobaciones) -4. ✅ CI/CD preparado (para cuando servidores estén listos) - -**Ambientes locales primero:** -- Docker Compose para desarrollo local -- Mock de APIs externas (hasta que lleguen credenciales) -- SQL Server local con seeds de datos - ---- - -### Fase 2: Arquitectura Moderna & Desacoplada - -``` -┌─────────────────────────────────────────────────────┐ -│ FRONTEND (Next.js + React) │ -│ - SSR para SEO │ -│ - Client-side para interactividad │ -│ - Consumo de APIs │ -└────────────┬────────────────────────────────────────┘ - │ - ▼ -┌─────────────────────────────────────────────────────┐ -│ BACKEND APIs (.NET 7) │ -│ - REST APIs │ -│ - Authentication/Authorization │ -│ - Business logic │ -│ - Orchestration layer │ -└────┬────┬────┬────┬────┬─────────────────────────┬──┘ - │ │ │ │ │ │ - ▼ ▼ ▼ ▼ ▼ ▼ -┌────────┐ ┌──────┐ ┌──────┐ ┌────────┐ ┌────────┐ ┌──────────┐ -│Salesf. │ │Twilio│ │CRM │ │Interpl.│ │WordPr. 
│ │SQL Server│ -│ │ │ │ │API │ │ │ │(CMS) │ │ │ -└────────┘ └──────┘ └──────┘ └────────┘ └────────┘ └──────────┘ -``` - -**Stack elegido:** - -**Frontend:** -- `Next.js 13` - SSR, routing, optimizaciones -- `React 18` - Componentes, hooks, context -- `TypeScript` - Type safety -- `Tailwind CSS` - Styling moderno - -**Backend:** -- `.NET 7` - APIs REST -- `Entity Framework Core` - ORM -- `SQL Server 2019` - Base de datos -- `Polly` - Resilience patterns (retry, circuit breaker) - -**¿Por qué Next.js en vez de mantener WordPress?** -- ✅ Performance (SSR vs PHP monolítico) -- ✅ SEO optimizado (crítico para farmacéutica) -- ✅ Experiencia moderna (SPA cuando necesario) -- ✅ Escalabilidad -- ✅ WordPress mantenido solo como CMS (headless) - ---- - -### Fase 3: Integraciones (Core del Proyecto) - -#### 1. Salesforce - Campañas y Registro de Pedidos - -**Solución implementada:** - -Salesforce fue configurado para gestionar dos funcionalidades principales: - -**a) Campañas de descuento:** -- Marketing configura campañas en Salesforce (medicamento X, descuento Y%, período) -- Backend consulta campañas activas vía API -- Frontend (Next.js) muestra porcentaje de descuento disponible basado en: medicamento + campaña activa - -**b) Registro de pedidos:** -- Usuario informa: CRM del médico, UF, CPF del paciente, medicamento -- Sistema valida datos (CRM real vía API oficial, CPF válido) -- Porcentaje es calculado automáticamente (campañas de Salesforce + reglas del CMS) -- Pedido es registrado en Salesforce con todos los datos (compliance LGPD) - -**Desafíos técnicos superados:** -- Autenticación OAuth2 con refresh token automático -- Rate limiting (Salesforce tiene límites de API/día) -- Retry logic para fallas transitorias (Polly) -- Enmascaramiento de CPF para logs (LGPD) - ---- - -#### 2. Twilio - Autenticación por SMS (2FA) - -**Solución implementada:** - -Sistema de autenticación de dos factores para garantizar seguridad: - -**Flujo de login:** -1. 
Usuario informa teléfono -2. Backend genera código de 6 dígitos (válido por 5 minutos) -3. SMS enviado vía Twilio ("Su código: 123456") -4. Usuario digita código en frontend -5. Backend valida código + timestamp de expiración -6. Token JWT emitido tras validación exitosa - -**Compliance y auditoría:** -- Teléfonos enmascarados en logs (LGPD) -- Auditoría completa (quién, cuándo, qué SMS) -- Tasa de entrega: 99.8% - ---- - -#### 3. API Oficial de Médicos (Consejo Regional de Medicina) - -**Solución implementada:** - -Validación automática de médicos vía API oficial de los consejos de medicina: - -**Validaciones realizadas:** -- CRM existe y está activo en el consejo -- Nombre del médico corresponde al CRM informado -- Especialidad es permitida (regla de negocio del laboratorio) -- UF corresponde al estado de registro - -**Optimizaciones:** -- Cache de 24 horas para reducir llamadas a API oficial -- Fallback en caso de API fuera del aire (notifica admin) -- Retry automático para fallas transitorias - -**Por qué esto importa:** -Garantiza que solo médicos reales y activos puedan prescribir descuentos, evitando fraudes. - ---- - -#### 4. WordPress como CMS Headless - -**Solución implementada:** - -Marketing continúa gestionando contenido en WordPress (familiar), pero frontend es Next.js moderno. - -**Arquitectura:** -- WordPress: Gestiona textos, imágenes, reglas de campañas -- WordPress REST API: Expone contenido vía JSON -- Next.js: Consume API y renderiza con SSR (SEO optimizado) - -**Beneficios:** -- ✅ Marketing no necesita aprender nueva herramienta -- ✅ Frontend moderno (performance, UX) -- ✅ SEO optimizado (Server-Side Rendering) -- ✅ Separación clara de responsabilidades (contenido vs código) - ---- - -### Fase 4: Resiliencia & Error Handling - -Con múltiples integraciones externas, fallas son inevitables. La solución fue implementar **patrones de resiliencia** usando biblioteca Polly (.NET): - -**Patrones implementados:** - -**1. 
Retry (Reintentar)** -- Si Salesforce/Twilio/CRM API fallan, sistema intenta automáticamente 2-3x -- Espera crece exponencialmente (1s, 2s, 4s) para evitar sobrecarga -- Solo errores transitorios (timeout, 503) son reintentados - -**2. Circuit Breaker (Disyuntor)** -- Si servicio falla 5x seguidas, "abre el circuito" por 30s -- Durante 30s, no intenta más (evita desperdiciar recursos) -- Tras 30s, intenta nuevamente (puede haber vuelto) - -**3. Timeout** -- Cada integración tiene tiempo máximo de respuesta -- Evita requisiciones trabadas indefinidamente - -**4. Fallback (Plan B)** -- Salesforce fuera: Pedido va a cola, procesa después -- Twilio fuera: Alerta administrador vía email -- CRM API fuera: Usa cache (datos de 24h atrás) -- WordPress fuera: Muestra contenido estático precargado - -**Estrategias por integración:** - -| Integración | Retry | Circuit Breaker | Timeout | Plan B | -|----------|-------|-----------------|---------|----------| -| Salesforce | 3x (exponencial) | 5 fallas/30s | 10s | Cola de retry | -| Twilio | 2x (lineal) | 3 fallas/60s | 5s | Alerta admin | -| CRM API | 3x (exponencial) | No | 15s | Cache | -| WordPress | No | No | 3s | Contenido estático | - -**Resultado en producción:** -- Salesforce tuvo mantenimiento (1h) → Sistema continuó funcionando (cola procesó después) -- Twilio tuvo inestabilidad → Retry automático resolvió 95% de los casos -- Zero downtime percibido por los usuarios - ---- - -## Superando Desafíos de Infraestructura - -### Problema: Servidores No Aprovisionados - -**Solución temporal:** -1. Desarrollo 100% local (Docker Compose) -2. Mocks de servicios externos (cuando credenciales se retrasaron) -3. 
CI/CD preparado pero no activo (esperando infra) - -**Cuando llegaron servidores (semana 6):** -- Deploy en 2 horas (ya estaba preparado) -- Zero sorpresas (todo probado localmente) -- Rollout suave - ---- - -### Problema: Credenciales de Integración Retrasadas - -**Impacto:** Twilio y Salesforce demoraron 3 semanas en ser aprovisionadas. - -**Solución:** Crear versiones "mock" (simuladas) de cada integración: -- Mock de Twilio: Registra en log en vez de enviar SMS real -- Mock de Salesforce: Guarda pedido en archivo JSON local -- Mock de CRM API: Retorna datos ficticios de médicos - -**Cómo funciona:** -- Ambiente de desarrollo: Usa mocks (no necesita credenciales) -- Ambiente de producción: Usa integraciones reales (cuando lleguen credenciales) -- Cambio automático basado en configuración - -**Resultado:** Equipo continuó 100% productivo durante 3 semanas, probando flujos completos sin depender de credenciales. - ---- - -### Problema: Equipo Inexperto con Stack Moderno - -**Contexto:** Equipo no tenía experiencia con React, TypeScript, .NET Core moderno, APIs REST. - -**Abordaje de capacitación:** - -**1. Pair Programming (1h/día por desarrollador)** -- Tech lead trabaja al lado del dev -- Compartir pantalla + explicación en tiempo real -- Dev escribe código, tech lead guía - -**2. Code Review Educativo** -- No solo "aprobar" o "reprobar" -- Comentarios explican el **por qué** de cada sugerencia -- Ejemplo: "Siempre trate errores de requisiciones! Si API cae, usuario necesita saber qué pasó." - -**3. Documentación Viva** -- ADRs (Architecture Decision Records): ¿Por qué elegimos X y no Y? -- READMEs: Cómo ejecutar, cómo probar, cómo deployar -- Onboarding guide: De cero a primera feature - -**4. 
Live Coding Semanal (2h)** -- Tech lead resuelve problema real en vivo -- Equipo observa proceso de pensamiento -- Q&A al final - -**Resultado:** -- Tras 4 semanas, equipo estaba autónomo -- Calidad de código aumentó consistentemente -- Devs pasaron a hacer code review entre sí (peer review) - ---- - -## Resultados e Impacto - -### Entrega Exitosa A Pesar de los Desafíos - -**Contexto:** Programa con múltiples squads trabajando en paralelo. - -**Resultado alcanzado:** -- ✅ **MVP entregado en producción con éxito** -- ✅ Retraso controlado de 2 meses (significativamente menor que otras iniciativas del programa) -- ✅ Todas las integraciones funcionando según planificado -- ✅ Zero critical bugs en producción (primera semana) - -**¿Por qué la entrega fue exitosa?** - -1. **Setup anticipado** - Git, procesos, Docker local desde día 1 -2. **Mocks estratégicos** - Equipo no quedó bloqueado esperando infra -3. **Arquitectura sólida** - Resiliencia desde el inicio -4. **Upskilling continuo** - Equipo aprendió haciendo -5. 
**Comunicación proactiva** - Riesgos reportados temprano - ---- - -### Métricas del MVP - -**Performance:** -- ⚡ Tiempo de carga: <2s (95th percentile) -- 📱 Lighthouse score: 95+ (mobile) -- 🔒 SSL A+ rating - -**Integraciones:** -- 📊 Salesforce: 100% de pedidos sincronizados -- 📱 Twilio: 99.8% delivery rate -- 🏥 CRM API: 10k validaciones/día (media) -- 💾 SQL Server: 50k registros/mes - -**Adopción:** -- 👨‍⚕️ 2.000+ médicos registrados (3 primeros meses) -- 🎯 15.000+ pedidos de descuento procesados -- ⭐ 4.8/5 satisfacción (encuesta interna) - ---- - -### Impacto en el Cliente - -**Transformación digital iniciada:** -- ✅ Git implementado y adoptado -- ✅ Procesos de desarrollo establecidos -- ✅ Equipo interno capacitado en stacks modernos -- ✅ Infraestructura cloud configurada (Azure) -- ✅ Roadmap de evolución definido - -**Base para futuros proyectos:** -- Arquitectura sirvió de referencia para otras iniciativas -- Patrones de código documentados (coding standards) -- Pipelines CI/CD reutilizados - ---- - -## Tech Stack - -`.NET 7` `C#` `Entity Framework Core` `SQL Server` `React 18` `Next.js 13` `TypeScript` `Tailwind CSS` `Salesforce API` `Twilio` `WordPress REST API` `Docker` `Polly` `OAuth2` `JWT` `LGPD Compliance` - ---- - -## Decisiones Clave & Trade-offs - -### ¿Por qué Next.js en vez de React puro? - -**Requisitos:** -- SEO crítico (farmacéutica necesita ranquear) -- Performance (médicos usan mobile) -- Contenido dinámico (WordPress) - -**Next.js ofrece:** -- ✅ SSR out-of-the-box -- ✅ API routes (BFF pattern) -- ✅ Optimizaciones automáticas (image, fonts) -- ✅ Deploy simplificado (Vercel, Azure) - ---- - -### ¿Por qué mantener WordPress? - -**Alternativas consideradas:** -1. ❌ Migrar contenido a BD + CMS custom (tiempo) -2. ❌ Strapi/Contentful (costos + learning curve) -3. 
✅ **WordPress headless** (mejor trade-off) - -**Ventajas:** -- Equipo de marketing ya sabe usar -- Contenido aprobado ya estaba ahí -- WordPress REST API es sólida -- Costo cero (ya estaba ejecutándose) - ---- - -### ¿Por qué .NET 7 en vez de Node.js? - -**Contexto:** Cliente tenía preferencia por stack Microsoft. - -**Beneficios adicionales:** -- Performance superior (vs Node en APIs) -- Type safety nativa (C#) -- Entity Framework (ORM maduro) -- Integración fácil con Azure (deploy futuro) -- Equipo del cliente tenía familiaridad - ---- - -## Lecciones Aprendidas - -### 1. Infraestructura Retrasada? Prepare Alternativas - -No espere servidores/credenciales para comenzar: -- Docker local es su amigo -- Mocks permiten progreso -- CI/CD puede ser preparado antes de tener dónde deployar - -**Lección:** Controle lo que puede controlar. - ---- - -### 2. Procesos > Herramientas - -Incluso sin Git corporativo, establecí: -- Branching strategy -- Code review -- Commit conventions -- Documentation standards - -**Resultado:** Cuando llegaron herramientas, equipo ya sabía usarlas. - ---- - -### 3. Upskilling es Inversión, No Costo - -Pair programming y code reviews tomaron tiempo, pero: -- ✅ Equipo quedó autónomo más rápido -- ✅ Calidad de código aumentó -- ✅ Knowledge sharing natural -- ✅ Onboarding de nuevos devs simplificado - ---- - -### 4. Resiliencia Desde el Inicio - -Implementar Polly (retry, circuit breaker) al inicio salvó en producción: -- Twilio tuvo inestabilidad (resuelta automáticamente) -- Salesforce tuvo mantenimiento (queue funcionó) -- CRM API tuvo lentitud (cache mitigó) - -**Lección:** No deje resiliencia para "después". Fallas van a ocurrir. - ---- - -### 5. Comunicación Clara de Riesgos - -Reporté semanalmente: -- Bloqueos (infraestructura, credenciales) -- Riesgos (plazos, dependencias) -- Soluciones alternativas (mocks, workarounds) - -**Resultado:** Stakeholders sabían exactamente el estado y no tuvieron sorpresas. 
- ---- - -## Desafíos & Cómo Fueron Superados - -| Desafío | Impacto | Solución | Resultado | -|---------|---------|---------|-----------| -| Sin Git | Bloqueo total | Setup local + GitLab Cloud | Equipo productivo día 1 | -| Sin servidores | Sin ambiente de dev | Docker Compose local | Dev/test local completo | -| Credenciales retrasadas | Integración bloqueada | Mock services | Progreso sin bloqueo | -| Equipo inexperto | Código de baja calidad | Pair prog + Code review | Ramp-up en 4 semanas | -| Múltiples integraciones | Complejidad alta | Polly + patterns | Zero downtime prod | - ---- - -## Próximos Pasos (Post-MVP) - -**Roadmap sugerido al cliente:** - -1. **Fase 2: Expansión de funcionalidades** - - Dashboard para médicos (histórico de pedidos) - - Notificaciones push (Firebase) - - Integración con e-commerce (compra directa) - -2. **Fase 3: Optimizaciones** - - Cache distribuido (Redis) - - CDN para assets estáticos - - Analytics avanzado (Amplitude) - -3. **Fase 4: Escala** - - Kubernetes (AKS) - - Microservicios (quebrar monolito) - - Event-driven architecture (Azure Service Bus) - ---- - -**Resultado:** MVP entregado en producción a pesar de comenzar literalmente de cero, estableciendo fundaciones sólidas para transformación digital del cliente. - -[¿Necesita construir un MVP en escenario desafiante? 
Póngase en contacto](#contact) diff --git a/Content/Cases/es/sap-integration-healthcare.md b/Content/Cases/es/sap-integration-healthcare.md deleted file mode 100644 index 0a6a430..0000000 --- a/Content/Cases/es/sap-integration-healthcare.md +++ /dev/null @@ -1,211 +0,0 @@ ---- -title: "Sistema de Integración SAP Healthcare" -slug: "sap-integration-healthcare" -summary: "Integración bidireccional procesando 100k+ transacciones/día con 99.9% uptime" -client: "Confidencial - Multinacional Healthcare" -industry: "Healthcare" -timeline: "6 meses" -role: "Arquitecto de Integración" -image: "" -tags: - - SAP - - C# - - .NET - - Integraciones - - Enterprise - - Healthcare -featured: true -order: 1 -date: 2023-06-15 -seo_title: "Caso: Integración SAP Healthcare - 100k Transacciones/Día" -seo_description: "Cómo arquitectamos sistema de integración SAP procesando 100k+ transacciones diarias con 99.9% uptime para empresa healthcare." -seo_keywords: "integración SAP, C#, .NET, SAP Connector, enterprise integration, healthcare" ---- - -## Descripción General - -**Cliente:** Multinacional Healthcare (confidencial) -**Tamaño:** 100.000+ empleados -**Proyecto:** Integración de beneficios -**Timeline:** 6 meses -**Mi Rol:** Arquitecto de Integración - ---- - -## Desafío - -El cliente tenía sistema interno de gestión de beneficios que necesitaba sincronizar con SAP ECC para procesar nómina. 
- -### Dolores principales: -- Proceso manual sujeto a errores -- 3-5 días de delay entre sistemas -- 100k empleados esperando procesamiento -- Picos de carga (fin de mes) - -### Constraints: -- Presupuesto limitado (sin SAP BTP) -- Equipo SAP interno pequeño (2 desarrolladores) -- Plazo ajustado (6 meses go-live) -- Sistema legacy .NET Framework 4.5 - ---- - -## Solución - -Arquitectura de integración bidireccional: - -``` -[Sistema Interno] ←→ [Queue] ←→ [SAP Connector] ←→ [SAP ECC] - ↓ ↓ - [MongoDB Logs] [ABAP Z_BENEFITS] -``` - -### Componentes: -- .NET Service con SAP Connector (NCo 3.0) -- ABAP transaction personalizada (Z_BENEFITS) -- Queue system (RabbitMQ) para retry logic -- MongoDB para auditoría y troubleshooting -- Scheduler (Hangfire) para batch processing - -### Flujo: -1. Sistema genera cambios (new hire, alteraciones) -2. Service procesa batch (500 registros/vez) -3. SAP Connector llama Z_BENEFITS vía RFC -4. SAP retorna estado (éxito/error) -5. Retry automático si falla (máx 3x) -6. 
Logs MongoDB para troubleshooting - ---- - -## Resultado - -### Métricas: -- **100k+** transacciones/día procesadas -- **99.9%** uptime -- Reducción de **5 días → 4 horas** (delay) -- **80%** reducción tiempo procesamiento -- **Zero** errores manuales (vs 2-3% antes) - -### Beneficios: -- Empleados reciben beneficios a tiempo -- Equipo RRHH economiza 40h/mes (trabajo manual) -- Auditoría completa (compliance) -- Escalable (crecimiento 30% año sin refactor) - ---- - -## Tech Stack - -`C#` `.NET Framework 4.5` `SAP NCo 3.0` `RabbitMQ` `MongoDB` `Hangfire` `Docker` `SAP ECC` `ABAP` `RFC` - ---- - -## Decisiones & Motivación - -### 💡 Decisión 1: SAP Connector vs SAP BTP - -**Opciones evaluadas:** -- SAP BTP (eventos, APIs modernas, cloud) -- SAP Connector (RFC directo, on-premise) - -**Elegimos:** SAP Connector - -**Motivación:** -- Cliente tenía SAP ECC on-premise (no S/4) -- Presupuesto no permitía licencia BTP -- Equipo SAP cómodo con ABAP/RFC -- Necesidades atendidas con RFC (no necesitaba event-driven real-time) - -**Trade-off aceptado:** -- Menos "moderno" que BTP, pero 100% funcional -- Costo $0 adicional vs $30k+/año BTP -- Delivery 2 meses más rápido (sin learning curve BTP) - ---- - -### 💡 Decisión 2: Queue System vs Llamadas Directas - -**Opciones evaluadas:** -- Llamadas síncronas directas (más simple) -- Queue con retry (más complejo) - -**Elegimos:** Queue + Retry - -**Motivación:** -- SAP ocasionalmente indisponible (mantenimiento) -- Picos de carga (fin de mes = 200k reqs) -- Garantizar zero pérdida de datos -- Resiliencia > simplicidad (ambiente crítico) - -**Implementación:** -- RabbitMQ con dead-letter queue -- Retry exponencial (1min, 5min, 15min) -- Alertas si 3 fallas consecutivas - -**Resultado:** -- Zero pérdida datos en 2 años producción -- Equipo RRHH no necesita "estar pendiente" - ---- - -### 💡 Decisión 3: ABAP Personalizado vs Standard - -**Opciones evaluadas:** -- BAPIs standard SAP (zero código ABAP) -- Transaction personalizada 
(Z_BENEFITS) - -**Elegimos:** Transaction personalizada - -**Motivación:** -- BAPIs standard no tenían validaciones específicas del negocio -- Cliente quería lógica centralizada en SAP (single source of truth) -- Permitió validaciones complejas (elegibilidad, dependientes, límites) - -**Trade-off:** -- Requiere mantenimiento ABAP (equipo SAP interno) -- Pero: Cliente prefirió vs lógica dual (riesgo desincronización) - ---- - -### ❌ Alternativas NO Elegidas - -**Webhook/Callback (Event-Driven):** -- Cliente no tenía infraestructura exponer APIs -- Sistema interno detrás de firewall -- Polling batch funciona bien para el caso - -**Microservicios Kubernetes:** -- Overkill para integración única -- Equipo no tenía expertise K8s -- Docker simple suficiente - -**Real-time Sync (<1min):** -- Negocio no necesita (batch diario ok) -- Costo infra aumentaría 3x -- 4h delay es aceptable para nómina - ---- - -## Aprendizajes - -### ✅ Lo que funcionó muy bien: -- Involucrar equipo SAP desde día 1 (buy-in) -- MongoDB para logs (troubleshooting 10x más rápido) -- Retry logic salvó innumerables veces - -### 🔄 Lo que haría diferente: -- Agregar health check endpoint más temprano -- Dashboard de monitoreo desde inicio (agregamos después) - -### 📚 Lecciones para próximos proyectos: -- Cliente "presupuesto limitado" ≠ "solución limitada" - creatividad resuelve -- Documentar TODAS decisiones arquitecturales (team turnover) -- Simplicidad vence complejidad cuando ambas funcionan (KISS) - ---- - -## ¿Necesita Algo Similar? - -Integraciones SAP complejas, sistemas legacy, o arquitectura de alta disponibilidad? 
- -[Conversemos sobre su desafío →](/#contact) diff --git a/Controllers/CasesController.cs b/Controllers/CasesController.cs index 51bcd6d..acb4c73 100644 --- a/Controllers/CasesController.cs +++ b/Controllers/CasesController.cs @@ -1,4 +1,5 @@ using Microsoft.AspNetCore.Mvc; +using CarneiroTech.Resources; using CarneiroTech.Services; namespace CarneiroTech.Controllers; @@ -7,11 +8,13 @@ public class CasesController : Controller { private readonly ICaseService _caseService; private readonly ILogger _logger; + private readonly ILanguageService _languageService; - public CasesController(ICaseService caseService, ILogger logger) + public CasesController(ICaseService caseService, ILogger logger, ILanguageService languageService) { _caseService = caseService; _logger = logger; + _languageService = languageService; } // GET: /cases @@ -22,13 +25,14 @@ public class CasesController : Controller : await _caseService.GetCasesByTagAsync(tag); var allTags = await _caseService.GetAllTagsAsync(); + var lang = _languageService.GetCurrentLanguage(HttpContext); ViewData["Title"] = string.IsNullOrEmpty(tag) - ? "Cases - Carneiro Tech" - : $"Cases: {tag} - Carneiro Tech"; + ? SiteStrings.Get("cases.seo.title", lang) + : string.Format(SiteStrings.Get("cases.seo.title.tag", lang), tag); ViewData["Description"] = string.IsNullOrEmpty(tag) - ? "Explore our portfolio of technical consulting projects and solution design cases." - : $"Technical consulting cases related to {tag}."; + ? 
SiteStrings.Get("cases.seo.description", lang) + : string.Format(SiteStrings.Get("cases.seo.description.tag", lang), tag); ViewData["SelectedTag"] = tag; ViewBag.AllTags = allTags; diff --git a/Controllers/HomeController.cs b/Controllers/HomeController.cs index b208f87..f2ebc4d 100644 --- a/Controllers/HomeController.cs +++ b/Controllers/HomeController.cs @@ -2,6 +2,7 @@ using System.Diagnostics; using System.Text; using Microsoft.AspNetCore.Mvc; using CarneiroTech.Models; +using CarneiroTech.Resources; using CarneiroTech.Services; namespace CarneiroTech.Controllers; @@ -10,27 +11,31 @@ public class HomeController : Controller { private readonly ILogger _logger; private readonly ICaseService _caseService; + private readonly ILanguageService _languageService; - public HomeController(ILogger logger, ICaseService caseService) + public HomeController(ILogger logger, ICaseService caseService, ILanguageService languageService) { _logger = logger; _caseService = caseService; + _languageService = languageService; } public async Task Index() { var featuredCases = await _caseService.GetFeaturedCasesAsync(); + var lang = _languageService.GetCurrentLanguage(HttpContext); - ViewData["Title"] = "Carneiro Tech - Solution Design & Technical Consulting"; - ViewData["Description"] = "20+ years connecting business and technology. 
Specialized in technical proposals, MVP definition, and due diligence."; - ViewData["Keywords"] = "solution design, technical consulting, SAP integration, enterprise architecture, MVP definition, due diligence"; + ViewData["Title"] = SiteStrings.Get("home.seo.title", lang); + ViewData["Description"] = SiteStrings.Get("home.seo.description", lang); + ViewData["Keywords"] = SiteStrings.Get("home.seo.keywords", lang); return View(featuredCases); } public IActionResult Privacy() { - ViewData["Title"] = "Privacy Policy - Carneiro Tech"; + var lang = _languageService.GetCurrentLanguage(HttpContext); + ViewData["Title"] = SiteStrings.Get("privacy.title", lang); return View(); } diff --git a/Models/CaseMetadata.cs b/Models/CaseMetadata.cs index db8df79..7fe02e5 100644 --- a/Models/CaseMetadata.cs +++ b/Models/CaseMetadata.cs @@ -6,9 +6,10 @@ public class CaseMetadata public string Slug { get; set; } = string.Empty; public string Summary { get; set; } = string.Empty; public string Client { get; set; } = string.Empty; - public string Industry { get; set; } = string.Empty; - public string Timeline { get; set; } = string.Empty; - public string Role { get; set; } = string.Empty; + public string DeviceModel { get; set; } = string.Empty; + public decimal EstimatedSavings { get; set; } + public string Category { get; set; } = string.Empty; // Resgate, Upgrade, Manutenção + public string Thumbnail { get; set; } = string.Empty; public string Image { get; set; } = string.Empty; public List Tags { get; set; } = new(); public bool Featured { get; set; } diff --git a/Resources/SiteStrings.cs b/Resources/SiteStrings.cs index 0e032d2..5d468cc 100644 --- a/Resources/SiteStrings.cs +++ b/Resources/SiteStrings.cs @@ -5,80 +5,161 @@ namespace CarneiroTech.Resources public static Dictionary> Translations = new() { // Navigation - ["nav.services"] = new() { ["pt"] = "Serviços", ["en"] = "Services", ["es"] = "Servicios" }, + ["nav.services"] = new() { ["pt"] = "Especialidades", ["en"] = 
"Specialties", ["es"] = "Especialidades" }, ["nav.cases"] = new() { ["pt"] = "Cases", ["en"] = "Cases", ["es"] = "Casos" }, - ["nav.about"] = new() { ["pt"] = "Sobre", ["en"] = "About", ["es"] = "Acerca de" }, - ["nav.contact"] = new() { ["pt"] = "Contato", ["en"] = "Contact", ["es"] = "Contacto" }, + ["nav.about"] = new() { ["pt"] = "Por que nós?", ["en"] = "Why us?", ["es"] = "Por qué nosotros?" }, + ["nav.contact"] = new() { ["pt"] = "Agendar", ["en"] = "Schedule", ["es"] = "Agendar" }, + ["nav.menu"] = new() { ["pt"] = "Menu", ["en"] = "Menu", ["es"] = "Menu" }, // Hero Section - ["hero.greeting"] = new() { ["pt"] = "Bem-vindo ao Carneiro Tech", ["en"] = "Welcome to Carneiro Tech", ["es"] = "Bienvenido a Carneiro Tech" }, - ["hero.tagline"] = new() { ["pt"] = "É um prazer ter você aqui!", ["en"] = "It's A Great Pleasure To Have You Here!", ["es"] = "¡Es un placer tenerlo aquí!" }, - ["hero.title"] = new() { ["pt"] = "Solution Design & Consultoria Técnica", ["en"] = "Solution Design & Technical Consulting", ["es"] = "Solution Design y Consultoría Técnica" }, - ["hero.cta"] = new() { ["pt"] = "Saiba Mais", ["en"] = "Learn More", ["es"] = "Más Información" }, + ["hero.greeting"] = new() { ["pt"] = "CarneiroTech SBC", ["en"] = "CarneiroTech SBC", ["es"] = "CarneiroTech SBC" }, + ["hero.tagline"] = new() { ["pt"] = "Assistência Técnica Premium no Golden Square", ["en"] = "Premium Tech Support at Golden Square", ["es"] = "Soporte Técnico Premium" }, + ["hero.title"] = new() { ["pt"] = "Não Descarte seu PC. Faça um Upgrade de Elite!", ["en"] = "Don't Toss Your PC. Elite Upgrade.", ["es"] = "No Descarte su PC. Upgrade de Elite." 
}, + ["hero.cta"] = new() { ["pt"] = "Agendar Avaliação", ["en"] = "Schedule Evaluation", ["es"] = "Agendar Evaluación" }, // Services Section - ["services.title"] = new() { ["pt"] = "Serviços", ["en"] = "Services", ["es"] = "Servicios" }, - ["services.subtitle"] = new() { ["pt"] = "Transformando desafios técnicos em soluções eficientes", ["en"] = "Transforming technical challenges into efficient solutions", ["es"] = "Transformando desafíos técnicos en soluciones eficientes" }, + ["services.title"] = new() { ["pt"] = "O que fazemos", ["en"] = "What we do", ["es"] = "Qué hacemos" }, + ["services.subtitle"] = new() { ["pt"] = "Tecnologia de ponta para quem não aceita lentidão.", ["en"] = "Top technology for those who don't accept slowness.", ["es"] = "Tecnología de punta para quienes no aceptan lentitud." }, - ["service.solution.title"] = new() { ["pt"] = "Solution Design", ["en"] = "Solution Design", ["es"] = "Solution Design" }, - ["service.solution.desc"] = new() { ["pt"] = "Desenho completo de soluções técnicas, identificando requisitos não explícitos e propondo arquiteturas adequadas ao contexto do projeto.", ["en"] = "Complete technical solution design, identifying non-explicit requirements and proposing architectures suitable for the project context.", ["es"] = "Diseño completo de soluciones técnicas, identificando requisitos no explícitos y proponiendo arquitecturas adecuadas al contexto del proyecto." }, + ["service.solution.title"] = new() { ["pt"] = "Upgrade de Performance", ["en"] = "Performance Upgrade", ["es"] = "Upgrade de Performance" }, + ["service.solution.desc"] = new() { ["pt"] = "Transformamos notebooks lentos em máquinas de alta performance com SSDs NVMe e RAM de baixa latência.", ["en"] = "We turn slow laptops into high-performance machines with NVMe SSDs and low-latency RAM.", ["es"] = "Transformamos laptops lentos en máquinas de alto rendimiento con SSD NVMe y RAM de baja latencia." 
}, - ["service.modernization.title"] = new() { ["pt"] = "Modernização de Sistemas", ["en"] = "System Modernization", ["es"] = "Modernización de Sistemas" }, - ["service.modernization.desc"] = new() { ["pt"] = "Migração de aplicações legadas para tecnologias modernas, com estratégias que garantem continuidade operacional e zero downtime.", ["en"] = "Migration of legacy applications to modern technologies, with strategies that ensure operational continuity and zero downtime.", ["es"] = "Migración de aplicaciones heredadas a tecnologías modernas, con estrategias que garantizan continuidad operativa y cero tiempo de inactividad." }, + ["service.modernization.title"] = new() { ["pt"] = "Limpeza Técnica", ["en"] = "Technical Cleaning", ["es"] = "Limpieza Técnica" }, + ["service.modernization.desc"] = new() { ["pt"] = "Troca de pasta térmica industrial e remoção de oxidação. Seu equipamento frio e silencioso como novo.", ["en"] = "Industrial thermal paste replacement and oxidation removal. Your gear cool and quiet as new.", ["es"] = "Cambio de pasta térmica industrial y remoción de oxidación. Su equipo frío y silencioso." }, - ["service.architecture.title"] = new() { ["pt"] = "Arquitetura de Software", ["en"] = "Software Architecture", ["es"] = "Arquitectura de Software" }, - ["service.architecture.desc"] = new() { ["pt"] = "Definição de arquiteturas escaláveis, resilientes e adequadas ao contexto, considerando custos, prazos e manutenibilidade.", ["en"] = "Definition of scalable, resilient and context-appropriate architectures, considering costs, deadlines and maintainability.", ["es"] = "Definición de arquitecturas escalables, resilientes y adecuadas al contexto, considerando costos, plazos y mantenibilidad." }, + ["service.architecture.title"] = new() { ["pt"] = "Reparo Estrutural", ["en"] = "Structural Repair", ["es"] = "Reparo Estructural" }, + ["service.architecture.desc"] = new() { ["pt"] = "Reconstrução de carcaças e dobradiças com resina industrial. 
Recuperamos o que outros mandam descartar.", ["en"] = "Case and hinge reconstruction with industrial resin. We recover what others tell you to discard.", ["es"] = "Reconstrucción de carcasas y bisagras con resina industrial." }, - ["service.consulting.title"] = new() { ["pt"] = "Consultoria Técnica", ["en"] = "Technical Consulting", ["es"] = "Consultoría Técnica" }, - ["service.consulting.desc"] = new() { ["pt"] = "Análise de viabilidade técnica, otimização de processos, redução de custos operacionais e escolha de tecnologias adequadas.", ["en"] = "Technical feasibility analysis, process optimization, operational cost reduction and selection of appropriate technologies.", ["es"] = "Análisis de viabilidad técnica, optimización de procesos, reducción de costos operativos y selección de tecnologías adecuadas." }, + ["service.consulting.title"] = new() { ["pt"] = "Avaliação Técnica", ["en"] = "Technical Evaluation", ["es"] = "Evaluación Técnica" }, + ["service.consulting.desc"] = new() { ["pt"] = "Diagnóstico real e honesto. Se outros disseram que não tem conserto, traga para quem conhece a engenharia.", ["en"] = "Real and honest diagnosis. If others said it's unfixable, bring it to those who know the engineering.", ["es"] = "Diagnóstico real y honesto. Si otros dijeron que no tiene arreglo, tráigalo." 
}, // Portfolio/Cases Section - ["portfolio.title"] = new() { ["pt"] = "Cases de Sucesso", ["en"] = "Success Cases", ["es"] = "Casos de Éxito" }, - ["portfolio.subtitle"] = new() { ["pt"] = "Conheça alguns projetos onde transformei desafios complexos em soluções elegantes", ["en"] = "Discover some projects where I transformed complex challenges into elegant solutions", ["es"] = "Conozca algunos proyectos donde transformé desafíos complejos en soluciones elegantes" }, - ["portfolio.viewcase"] = new() { ["pt"] = "Ver Case Completo", ["en"] = "View Full Case", ["es"] = "Ver Caso Completo" }, + ["portfolio.title"] = new() { ["pt"] = "Prova Real", ["en"] = "Real Proof", ["es"] = "Prueba Real" }, + ["portfolio.subtitle"] = new() { ["pt"] = "Veja como economizamos milhares de reais para nossos clientes de SBC", ["en"] = "See how we saved thousands of reais for our SBC clients", ["es"] = "Vea cómo ahorramos miles de reales para nuestros clientes" }, + ["portfolio.viewcase"] = new() { ["pt"] = "Ver Detalhes do Resgate", ["en"] = "View Rescue Details", ["es"] = "Ver Detalles del Rescate" }, // About Section - ["about.title"] = new() { ["pt"] = "Sobre", ["en"] = "About", ["es"] = "Acerca de" }, - ["about.subtitle"] = new() { ["pt"] = "Mais de uma década transformando desafios técnicos em soluções eficientes", ["en"] = "Over a decade transforming technical challenges into efficient solutions", ["es"] = "Más de una década transformando desafíos técnicos en soluciones eficientes" }, + ["about.title"] = new() { ["pt"] = "20 Anos de Engenharia", ["en"] = "20 Years of Engineering", ["es"] = "20 Años de Ingeniería" }, + ["about.subtitle"] = new() { ["pt"] = "Eu conheço a tecnologia por dentro. Por isso sei como consertar.", ["en"] = "I know tech from the inside. That's why I know how to fix it.", ["es"] = "Conozco la tecnología por dentro. Por eso sé cómo arreglarla." 
}, ["about.text"] = new() { - ["pt"] = "Com mais de 10 anos de experiência em engenharia de software, especializei-me em Solution Design, arquitetura de sistemas e modernização de aplicações legadas. Meu diferencial está em identificar requisitos não explícitos, antecipar problemas e propor soluções que realmente funcionam na prática.", - ["en"] = "With over 10 years of experience in software engineering, I specialize in Solution Design, systems architecture and legacy application modernization. My differential is in identifying non-explicit requirements, anticipating problems and proposing solutions that really work in practice.", - ["es"] = "Con más de 10 años de experiencia en ingeniería de software, me especializo en Solution Design, arquitectura de sistemas y modernización de aplicaciones heredadas. Mi diferencial está en identificar requisitos no explícitos, anticipar problemas y proponer soluciones que realmente funcionan en la práctica." + ["pt"] = "Com mais de 20 anos abrindo e montando o que há de melhor em hardware, minha missão na CarneiroTech é trazer o nível de excelência enterprise para o seu notebook pessoal. Não somos apenas uma assistência; somos especialistas em performance e longevidade de hardware em São Bernardo do Campo.", + ["en"] = "With over 20 years opening and building the best in hardware, my mission at CarneiroTech is to bring enterprise-level excellence to your personal laptop. We are not just support; we are specialists in performance and hardware longevity in SBC.", + ["es"] = "Con más de 20 años abriendo y montando lo mejor en hardware, mi misión en CarneiroTech é traer la excelencia enterprise a su laptop personal." 
}, // Contact Section - ["contact.title"] = new() { ["pt"] = "Contato", ["en"] = "Contact", ["es"] = "Contacto" }, - ["contact.subtitle"] = new() { ["pt"] = "Vamos conversar sobre seu desafio técnico", ["en"] = "Let's talk about your technical challenge", ["es"] = "Hablemos sobre su desafío técnico" }, + ["contact.title"] = new() { ["pt"] = "Agendar Avaliação", ["en"] = "Schedule Evaluation", ["es"] = "Agendar Evaluación" }, + ["contact.subtitle"] = new() { ["pt"] = "Atendimento com hora marcada no Golden Square Shopping", ["en"] = "By appointment at Golden Square Mall", ["es"] = "Atención con hora marcada en Golden Square Shopping" }, ["contact.name"] = new() { ["pt"] = "Seu Nome *", ["en"] = "Your Name *", ["es"] = "Su Nombre *" }, ["contact.email"] = new() { ["pt"] = "Seu Email *", ["en"] = "Your Email *", ["es"] = "Su Email *" }, - ["contact.phone"] = new() { ["pt"] = "Seu Telefone", ["en"] = "Your Phone", ["es"] = "Su Teléfono" }, - ["contact.message"] = new() { ["pt"] = "Sua Mensagem *", ["en"] = "Your Message *", ["es"] = "Su Mensaje *" }, - ["contact.send"] = new() { ["pt"] = "Enviar Mensagem", ["en"] = "Send Message", ["es"] = "Enviar Mensaje" }, + ["contact.phone"] = new() { ["pt"] = "Seu WhatsApp", ["en"] = "Your WhatsApp", ["es"] = "Su WhatsApp" }, + ["contact.message"] = new() { ["pt"] = "Qual o modelo do seu equipamento e o problema? *", ["en"] = "What's your device model and problem? *", ["es"] = "¿Cuál es el modelo y el problema? *" }, + ["contact.send"] = new() { ["pt"] = "Solicitar Horário", ["en"] = "Request Time Slot", ["es"] = "Solicitar Horario" }, ["contact.sending"] = new() { ["pt"] = "Enviando...", ["en"] = "Sending...", ["es"] = "Enviando..." }, - ["contact.success"] = new() { ["pt"] = "Mensagem enviada com sucesso! Entraremos em contato em breve.", ["en"] = "Message sent successfully! We'll get in touch soon.", ["es"] = "¡Mensaje enviado con éxito! Nos pondremos en contacto pronto." 
}, - ["contact.error"] = new() { ["pt"] = "Erro ao enviar mensagem. Por favor, tente novamente ou entre em contato via email.", ["en"] = "Error sending message. Please try again or contact via email.", ["es"] = "Error al enviar mensaje. Por favor, inténtelo de nuevo o contacte por email." }, + ["contact.success"] = new() { ["pt"] = "Solicitação enviada! Chamaremos você no WhatsApp em breve.", ["en"] = "Request sent! We'll WhatsApp you shortly.", ["es"] = "¡Solicitud enviada! Lo llamaremos pronto." }, ["contact.or"] = new() { ["pt"] = "OU", ["en"] = "OR", ["es"] = "O" }, - ["contact.whatsapp"] = new() { ["pt"] = "Falar via WhatsApp", ["en"] = "Chat on WhatsApp", ["es"] = "Hablar por WhatsApp" }, - ["contact.whatsapp.subtitle"] = new() { ["pt"] = "Resposta rápida e direta", ["en"] = "Quick and direct response", ["es"] = "Respuesta rápida y directa" }, + ["contact.whatsapp"] = new() { ["pt"] = "Agendar via WhatsApp", ["en"] = "Schedule via WhatsApp", ["es"] = "Agendar por WhatsApp" }, + ["contact.whatsapp.subtitle"] = new() { ["pt"] = "Fale direto com o técnico", ["en"] = "Talk directly with the tech", ["es"] = "Hable directo con el técnico" }, + ["contact.whatsapp.message"] = new() { + ["pt"] = "[Site] Olá! Gostaria de agendar uma avaliação para meu notebook no Golden Square.", + ["en"] = "[Website] Hello! I'd like to schedule an evaluation for my laptop at Golden Square.", + ["es"] = "[Sitio] ¡Hola! Me gustaría agendar una evaluación para mi laptop." 
+ }, // Footer - ["footer.rights"] = new() { ["pt"] = "Carneiro Tech - Todos os direitos reservados", ["en"] = "Carneiro Tech - All rights reserved", ["es"] = "Carneiro Tech - Todos los derechos reservados" }, + ["footer.rights"] = new() { ["pt"] = "CarneiroTech SBC - Todos os direitos reservados", ["en"] = "CarneiroTech SBC - All rights reserved", ["es"] = "CarneiroTech SBC" }, ["footer.privacy"] = new() { ["pt"] = "Privacidade", ["en"] = "Privacy", ["es"] = "Privacidad" }, ["footer.terms"] = new() { ["pt"] = "Termos", ["en"] = "Terms", ["es"] = "Términos" }, + ["footer.copyright"] = new() { ["pt"] = "Copyright © {year} CarneiroTech - Ricardo Carneiro", ["en"] = "Copyright © {year} CarneiroTech - Ricardo Carneiro", ["es"] = "Copyright © {year} CarneiroTech - Ricardo Carneiro" }, // Case Details ["case.client"] = new() { ["pt"] = "Cliente", ["en"] = "Client", ["es"] = "Cliente" }, - ["case.industry"] = new() { ["pt"] = "Indústria", ["en"] = "Industry", ["es"] = "Industria" }, - ["case.timeline"] = new() { ["pt"] = "Prazo", ["en"] = "Timeline", ["es"] = "Plazo" }, - ["case.role"] = new() { ["pt"] = "Papel", ["en"] = "Role", ["es"] = "Rol" }, - ["case.tags"] = new() { ["pt"] = "Tecnologias", ["en"] = "Technologies", ["es"] = "Tecnologías" }, - ["case.back"] = new() { ["pt"] = "← Voltar para Cases", ["en"] = "← Back to Cases", ["es"] = "← Volver a Casos" }, + ["case.category"] = new() { ["pt"] = "Categoria", ["en"] = "Category", ["es"] = "Categoría" }, + ["case.device"] = new() { ["pt"] = "Dispositivo", ["en"] = "Device", ["es"] = "Dispositivo" }, + ["case.savings"] = new() { ["pt"] = "Economia", ["en"] = "Savings", ["es"] = "Ahorro" }, + ["case.tags"] = new() { ["pt"] = "Intervenções", ["en"] = "Interventions", ["es"] = "Intervenciones" }, + ["case.back"] = new() { ["pt"] = "Ver outros cases", ["en"] = "View other cases", ["es"] = "Ver otros casos" }, + ["case.contact"] = new() { ["pt"] = "Quero esse resultado", ["en"] = "I want this result", ["es"] = "Quiero este 
resultado" }, + + // Home Page + ["home.seo.title"] = new() { ["pt"] = "CarneiroTech SBC - Upgrade e Manutenção Premium de Notebooks", ["en"] = "CarneiroTech SBC - Premium Laptop Upgrade & Maintenance", ["es"] = "CarneiroTech SBC - Upgrade y Mantenimiento Premium" }, + ["home.seo.description"] = new() { ["pt"] = "Especialista em upgrades de performance e recuperação de notebooks em São Bernardo do Campo. Atendimento no Golden Square Shopping.", ["en"] = "Specialist in performance upgrades and laptop recovery in SBC. Service at Golden Square Mall.", ["es"] = "Especialista en upgrades de performance y recuperación de laptops en SBC." }, + ["home.seo.keywords"] = new() { ["pt"] = "manutenção notebook SBC, upgrade ssd notebook, conserto notebook golden square, assistência técnica SBC, ricardo carneiro", ["en"] = "laptop repair SBC, ssd upgrade, golden square mall tech support", ["es"] = "mantenimiento laptop SBC, upgrade ssd, asistencia tecnica SBC" }, + + ["home.masthead.subheading"] = new() { ["pt"] = "Assistência Técnica Premium em SBC", ["en"] = "Premium Tech Support in SBC", ["es"] = "Soporte Técnico Premium en SBC" }, + ["home.masthead.heading"] = new() { ["pt"] = "Não Descarte seu PC. Faça um Upgrade de Elite!", ["en"] = "Don't Toss Your PC. Elite Upgrade.", ["es"] = "No Descarte su PC. Upgrade de Elite." }, + ["home.masthead.cta"] = new() { ["pt"] = "Agendar no Golden Square", ["en"] = "Schedule at Golden Square", ["es"] = "Agendar en Golden Square" }, + + ["home.services.title"] = new() { ["pt"] = "Nossas Especialidades", ["en"] = "Our Specialties", ["es"] = "Nuestras Especialidades" }, + ["home.services.subtitle"] = new() { ["pt"] = "Tratamos seu equipamento como uma peça de engenharia.", ["en"] = "We treat your gear like a piece of engineering.", ["es"] = "Tratamos su equipo como una pieza de ingeniería." 
}, + + ["home.services.solution.title"] = new() { ["pt"] = "Upgrade de Performance", ["en"] = "Performance Upgrade", ["es"] = "Upgrade de Performance" }, + ["home.services.solution.desc"] = new() { ["pt"] = "SSDs NVMe de alta velocidade e memórias de baixa latência para dar vida nova ao seu computador.", ["en"] = "High-speed NVMe SSDs and low-latency RAM to breathe new life into your computer.", ["es"] = "SSDs NVMe de alta velocidad y memorias de baja latencia." }, + + ["home.services.consulting.title"] = new() { ["pt"] = "Manutenção Preventiva", ["en"] = "Preventive Maintenance", ["es"] = "Mantenimiento Preventivo" }, + ["home.services.consulting.desc"] = new() { ["pt"] = "Limpeza interna profunda e troca de pasta térmica industrial (Arctic Silver) para evitar queimas.", ["en"] = "Deep internal cleaning and industrial thermal paste replacement to prevent burnouts.", ["es"] = "Limpieza interna profunda y cambio de pasta térmica industrial." }, + + ["home.services.proposals.title"] = new() { ["pt"] = "Resgate de Carcaça", ["en"] = "Case Rescue", ["es"] = "Rescate de Carcasa" }, + ["home.services.proposals.desc"] = new() { ["pt"] = "Reparo profissional de dobradiças e plásticos quebrados. Seu notebook abrindo e fechando como novo.", ["en"] = "Professional hinge and broken plastic repair. Your laptop opening and closing like new.", ["es"] = "Reparo profesional de bisagras y plásticos rotos." }, + + ["home.cases.title"] = new() { ["pt"] = "Cases de Sucesso", ["en"] = "Success Cases", ["es"] = "Casos de Éxito" }, + ["home.cases.subtitle"] = new() { ["pt"] = "Equipamentos condenados que voltaram à vida", ["en"] = "Condemned gear that came back to life", ["es"] = "Equipos condenados que volvieron a la vida" }, + ["home.cases.empty"] = new() { ["pt"] = "Novos resgates sendo documentados.", ["en"] = "New rescues being documented.", ["es"] = "Nuevos rescates siendo documentados." 
}, + ["home.cases.viewall"] = new() { ["pt"] = "Ver todos os resgates", ["en"] = "View all rescues", ["es"] = "Ver todos los rescates" }, + + ["home.about.title"] = new() { ["pt"] = "Por que a CarneiroTech?", ["en"] = "Why CarneiroTech?", ["es"] = "¿Por qué CarneiroTech?" }, + ["home.about.subtitle"] = new() { ["pt"] = "20 anos de experiência real em hardware.", ["en"] = "20 years of real hardware experience.", ["es"] = "20 años de experiencia real en hardware." }, + + ["home.about.timeline.2000_2005.title"] = new() { ["pt"] = "Paixão pelo Hardware", ["en"] = "Passion for Hardware", ["es"] = "Pasión por el Hardware" }, + ["home.about.timeline.2000_2005.text"] = new() { ["pt"] = "Início na eletrônica e montagem de PCs de alta performance. Onde tudo começou.", ["en"] = "Start in electronics and high-performance PC building. Where it all began.", ["es"] = "Inicio en la electrónica y montaje de PCs de alta performance." }, + + ["home.about.timeline.2005_2015.title"] = new() { ["pt"] = "Nível Enterprise", ["en"] = "Enterprise Level", ["es"] = "Nivel Enterprise" }, + ["home.about.timeline.2005_2015.text"] = new() { ["pt"] = "Trabalhando com sistemas críticos onde a falha de hardware significava prejuízos milionários.", ["en"] = "Working with critical systems where hardware failure meant million-dollar losses.", ["es"] = "Trabajando con sistemas críticos." }, + + ["home.about.timeline.2015_2020.title"] = new() { ["pt"] = "Especialista em Diagnóstico", ["en"] = "Diagnostic Specialist", ["es"] = "Especialista en Diagnóstico" }, + ["home.about.timeline.2015_2020.text"] = new() { ["pt"] = "Foco em resolver problemas complexos que assistências comuns não conseguiam diagnosticar.", ["en"] = "Focus on solving complex problems that common shops couldn't diagnose.", ["es"] = "Enfoque en resolver problemas complejos." 
}, + + ["home.about.timeline.2020_now.title"] = new() { ["pt"] = "Golden Square SBC", ["en"] = "Golden Square SBC", ["es"] = "Golden Square SBC" }, + ["home.about.timeline.2020_now.text"] = new() { ["pt"] = "Fundação da CarneiroTech no coração de SBC, trazendo consultoria técnica de elite para o público final.", ["en"] = "CarneiroTech founded in the heart of SBC, bringing elite technical consulting to the end user.", ["es"] = "Fundación de CarneiroTech en el corazón de SBC." }, + + ["home.about.cta.line1"] = new() { ["pt"] = "Seu", ["en"] = "Your", ["es"] = "Su" }, + ["home.about.cta.line2"] = new() { ["pt"] = "PC Novo", ["en"] = "New PC", ["es"] = "PC Nuevo" }, + ["home.about.cta.line3"] = new() { ["pt"] = "De Novo!", ["en"] = "Again!", ["es"] = "¡De Nuevo!" }, + + ["home.contact.title"] = new() { ["pt"] = "Agendar Avaliação", ["en"] = "Schedule Evaluation", ["es"] = "Agendar Evaluación" }, + ["home.contact.subtitle"] = new() { ["pt"] = "Atendimento no Golden Square Shopping", ["en"] = "Service at Golden Square Mall", ["es"] = "Atención en Golden Square Shopping" }, + ["home.contact.name"] = new() { ["pt"] = "Seu Nome", ["en"] = "Your Name", ["es"] = "Su Nombre" }, + ["home.contact.email"] = new() { ["pt"] = "Seu Email", ["en"] = "Your Email", ["es"] = "Su Email" }, + ["home.contact.phone"] = new() { ["pt"] = "Seu WhatsApp", ["en"] = "Your WhatsApp", ["es"] = "Su WhatsApp" }, + ["home.contact.message"] = new() { ["pt"] = "Modelo do equipamento e sintomas", ["en"] = "Device model and symptoms", ["es"] = "Modelo y síntomas" }, + ["home.contact.send"] = new() { ["pt"] = "Solicitar Horário", ["en"] = "Request Time Slot", ["es"] = "Solicitar Horario" }, + ["home.contact.sending"] = new() { ["pt"] = "Enviando...", ["en"] = "Sending...", ["es"] = "Enviando..." }, + ["home.contact.success"] = new() { ["pt"] = "Solicitação recebida! Faremos o agendamento via WhatsApp.", ["en"] = "Request received! We'll schedule via WhatsApp.", ["es"] = "¡Solicitud recibida!" 
}, + ["home.contact.or"] = new() { ["pt"] = "OU", ["en"] = "OR", ["es"] = "O" }, + ["home.contact.whatsapp"] = new() { ["pt"] = "Chamar no WhatsApp", ["en"] = "WhatsApp Us", ["es"] = "Llamar por WhatsApp" }, + ["home.contact.whatsapp.subtitle"] = new() { ["pt"] = "Fale agora com o Ricardo", ["en"] = "Talk to Ricardo now", ["es"] = "Hable ahora con Ricardo" }, + + // Cases UI + ["cases.header.subtitle"] = new() { ["pt"] = "Resgates", ["en"] = "Rescues", ["es"] = "Rescates" }, + ["cases.header.title"] = new() { ["pt"] = "Cases de Sucesso", ["en"] = "Success Cases", ["es"] = "Casos de Éxito" }, + ["cases.filter.title"] = new() { ["pt"] = "Filtrar por tipo:", ["en"] = "Filter by type:", ["es"] = "Filtrar por tipo:" }, + ["cases.filter.all"] = new() { ["pt"] = "Todos", ["en"] = "All", ["es"] = "Todos" }, + ["cases.empty.tag"] = new() { ["pt"] = "Nenhum resgate com \"{0}\".", ["en"] = "No rescues with \"{0}\".", ["es"] = "Sin rescates con \"{0}\"." }, + ["cases.empty.all"] = new() { ["pt"] = "Novos resgates em breve.", ["en"] = "New rescues soon.", ["es"] = "Nuevos rescates pronto." }, + ["cases.viewall"] = new() { ["pt"] = "Ver Todos", ["en"] = "View All", ["es"] = "Ver Todos" }, + ["cases.seo.title"] = new() { ["pt"] = "Resgates de Hardware - CarneiroTech SBC", ["en"] = "Hardware Rescues - CarneiroTech SBC", ["es"] = "Rescates de Hardware" }, + ["cases.seo.title.tag"] = new() { ["pt"] = "Resgates: {0} - CarneiroTech", ["en"] = "Rescues: {0} - CarneiroTech", ["es"] = "Rescates: {0} - CarneiroTech" }, + ["cases.seo.description"] = new() { ["pt"] = "Explore como transformamos máquinas condenadas em ferramentas de elite.", ["en"] = "Explore how we turn condemned machines into elite tools.", ["es"] = "Vea cómo transformamos máquinas condenadas." }, + ["cases.seo.description.tag"] = new() { ["pt"] = "Resgates de hardware relacionados a {0}.", ["en"] = "Hardware rescues related to {0}.", ["es"] = "Rescates relacionados con {0}." 
}, + // Privacy + ["privacy.title"] = new() { ["pt"] = "Política de Privacidade", ["en"] = "Privacy Policy", ["es"] = "Política de Privacidad" }, + ["privacy.body"] = new() { ["pt"] = "Seus dados estão seguros conosco e serão usados apenas para agendamento técnico.", ["en"] = "Your data is safe and will only be used for technical scheduling.", ["es"] = "Sus datos están seguros." }, // WhatsApp message ["whatsapp.message"] = new() { - ["pt"] = "[Site] Olá! Eu gostaria de conversar com você sobre uma possível proposta comercial.", - ["en"] = "[Website] Hello! I would like to talk to you about a possible business proposal.", - ["es"] = "[Sitio] ¡Hola! Me gustaría hablar con usted sobre una posible propuesta comercial." + ["pt"] = "Olá! Gostaria de um orçamento para o meu aparelho.", + ["en"] = "Hello! I would like a quote for my device.", + ["es"] = "¡Hola! Me gustaría un presupuesto para mi equipo." }, }; diff --git a/Services/CaseService.cs b/Services/CaseService.cs index c9eaeda..cb12772 100644 --- a/Services/CaseService.cs +++ b/Services/CaseService.cs @@ -9,62 +9,33 @@ public class CaseService : ICaseService private readonly IMarkdownService _markdownService; private readonly IMemoryCache _cache; private readonly IWebHostEnvironment _environment; - private readonly ILanguageService _languageService; - private readonly IHttpContextAccessor _httpContextAccessor; - private readonly string _casesBasePath; - private const string CACHE_KEY_PREFIX = "cases_"; + private readonly string _casesPath; + private const string CACHE_KEY = "cases_retail"; private const int CACHE_MINUTES = 60; - public CaseService(IMarkdownService markdownService, IMemoryCache cache, IWebHostEnvironment environment, - ILanguageService languageService, IHttpContextAccessor httpContextAccessor) + public CaseService(IMarkdownService markdownService, IMemoryCache cache, IWebHostEnvironment environment) { _markdownService = markdownService; _cache = cache; _environment = environment; - 
_languageService = languageService; - _httpContextAccessor = httpContextAccessor; - _casesBasePath = Path.Combine(_environment.ContentRootPath, "Content", "Cases"); - } - - private string GetCurrentLanguage() - { - if (_httpContextAccessor.HttpContext != null) - { - return _languageService.GetCurrentLanguage(_httpContextAccessor.HttpContext); - } - return "pt"; // Default fallback - } - - private string GetCasesPath(string language) - { - return Path.Combine(_casesBasePath, language); + _casesPath = Path.Combine(_environment.ContentRootPath, "Content", "Cases", "Retail"); } public async Task> GetAllCasesAsync() { - var language = GetCurrentLanguage(); - var cacheKey = $"{CACHE_KEY_PREFIX}{language}"; - - if (_cache.TryGetValue(cacheKey, out List? cachedCases) && cachedCases != null) + if (_cache.TryGetValue(CACHE_KEY, out List? cachedCases) && cachedCases != null) { return cachedCases; } var cases = new List(); - var casesPath = GetCasesPath(language); - if (!Directory.Exists(casesPath)) + if (!Directory.Exists(_casesPath)) { - // Try Portuguese as fallback - casesPath = GetCasesPath("pt"); - if (!Directory.Exists(casesPath)) - { - Directory.CreateDirectory(casesPath); - return cases; - } + return cases; } - var markdownFiles = Directory.GetFiles(casesPath, "*.md"); + var markdownFiles = Directory.GetFiles(_casesPath, "*.md"); foreach (var file in markdownFiles) { @@ -75,12 +46,11 @@ public class CaseService : ICaseService } } - // Sort by order (ascending) and date (descending) cases = cases.OrderBy(c => c.Metadata.Order) .ThenByDescending(c => c.Metadata.Date) .ToList(); - _cache.Set(cacheKey, cases, TimeSpan.FromMinutes(CACHE_MINUTES)); + _cache.Set(CACHE_KEY, cases, TimeSpan.FromMinutes(CACHE_MINUTES)); return cases; } @@ -100,18 +70,13 @@ public class CaseService : ICaseService public async Task> GetAllTagsAsync() { var allCases = await GetAllCasesAsync(); - var tags = allCases.SelectMany(c => c.Metadata.Tags) - .Distinct() - .OrderBy(t => t) - .ToList(); - 
return tags; + return allCases.SelectMany(c => c.Metadata.Tags).Distinct().OrderBy(t => t).ToList(); } public async Task> GetCasesByTagAsync(string tag) { var allCases = await GetAllCasesAsync(); - return allCases.Where(c => c.Metadata.Tags.Contains(tag, StringComparer.OrdinalIgnoreCase)) - .ToList(); + return allCases.Where(c => c.Metadata.Tags.Contains(tag, StringComparer.OrdinalIgnoreCase)).ToList(); } private async Task ParseCaseFileAsync(string filePath) @@ -127,9 +92,10 @@ public class CaseService : ICaseService Slug = GetStringValue(frontMatter, "slug"), Summary = GetStringValue(frontMatter, "summary"), Client = GetStringValue(frontMatter, "client"), - Industry = GetStringValue(frontMatter, "industry"), - Timeline = GetStringValue(frontMatter, "timeline"), - Role = GetStringValue(frontMatter, "role"), + DeviceModel = GetStringValue(frontMatter, "device_model"), + EstimatedSavings = GetDecimalValue(frontMatter, "estimated_savings"), + Category = GetStringValue(frontMatter, "category"), + Thumbnail = GetStringValue(frontMatter, "thumbnail"), Image = GetStringValue(frontMatter, "image"), Tags = GetListValue(frontMatter, "tags"), Featured = GetBoolValue(frontMatter, "featured"), @@ -140,69 +106,31 @@ public class CaseService : ICaseService SeoKeywords = GetStringValue(frontMatter, "seo_keywords") }; - var htmlContent = _markdownService.ConvertToHtml(bodyContent); - return new CaseModel { Metadata = metadata, - ContentHtml = htmlContent, + ContentHtml = _markdownService.ConvertToHtml(bodyContent), ContentMarkdown = bodyContent }; } - catch (Exception) - { - // Log error here if needed - return null; - } + catch { return null; } } - private string GetStringValue(Dictionary dict, string key) - { - return dict.ContainsKey(key) ? dict[key]?.ToString() ?? string.Empty : string.Empty; - } + private string GetStringValue(Dictionary dict, string key) => + dict.ContainsKey(key) ? dict[key]?.ToString() ?? 
string.Empty : string.Empty; - private List GetListValue(Dictionary dict, string key) - { - if (!dict.ContainsKey(key)) return new List(); + private decimal GetDecimalValue(Dictionary dict, string key) => + dict.ContainsKey(key) && decimal.TryParse(dict[key]?.ToString(), NumberStyles.Any, CultureInfo.InvariantCulture, out decimal res) ? res : 0; - if (dict[key] is List list) - { - return list.Select(item => item?.ToString() ?? string.Empty).ToList(); - } + private List GetListValue(Dictionary dict, string key) => + dict.ContainsKey(key) && dict[key] is List list ? list.Select(i => i?.ToString() ?? "").ToList() : new(); - return new List(); - } + private bool GetBoolValue(Dictionary dict, string key) => + dict.ContainsKey(key) && (dict[key]?.ToString()?.ToLower() == "true" || dict[key]?.ToString() == "1"); - private bool GetBoolValue(Dictionary dict, string key) - { - if (!dict.ContainsKey(key)) return false; + private int GetIntValue(Dictionary dict, string key) => + dict.ContainsKey(key) && int.TryParse(dict[key]?.ToString(), out int res) ? res : 0; - var value = dict[key]?.ToString()?.ToLower(); - return value == "true" || value == "yes" || value == "1"; - } - - private int GetIntValue(Dictionary dict, string key) - { - if (!dict.ContainsKey(key)) return 0; - - if (int.TryParse(dict[key]?.ToString(), out int result)) - { - return result; - } - - return 0; - } - - private DateTime GetDateValue(Dictionary dict, string key) - { - if (!dict.ContainsKey(key)) return DateTime.MinValue; - - var dateString = dict[key]?.ToString(); - if (DateTime.TryParse(dateString, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime result)) - { - return result; - } - - return DateTime.MinValue; - } + private DateTime GetDateValue(Dictionary dict, string key) => + dict.ContainsKey(key) && DateTime.TryParse(dict[key]?.ToString(), CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime res) ? 
res : DateTime.MinValue; } diff --git a/Services/LanguageService.cs b/Services/LanguageService.cs index f5b06e1..fa8b2b6 100644 --- a/Services/LanguageService.cs +++ b/Services/LanguageService.cs @@ -29,10 +29,8 @@ namespace CarneiroTech.Services // 2. Detect from browser Accept-Language header var browserLanguage = DetectBrowserLanguage(context); - if (!string.IsNullOrEmpty(browserLanguage)) + if (!string.IsNullOrEmpty(browserLanguage) && _supportedLanguages.Contains(browserLanguage)) { - // Save detected language to cookie - SetLanguage(context, browserLanguage); return browserLanguage; } @@ -48,7 +46,7 @@ namespace CarneiroTech.Services { Expires = DateTimeOffset.UtcNow.AddYears(1), HttpOnly = false, // Allow JavaScript to read for client-side logic - Secure = true, + Secure = context.Request.IsHttps, SameSite = SameSiteMode.Lax, Path = "/" }; @@ -63,7 +61,7 @@ namespace CarneiroTech.Services if (string.IsNullOrEmpty(acceptLanguageHeader)) { - return DefaultLanguage; + return string.Empty; } // Parse Accept-Language header @@ -84,7 +82,7 @@ namespace CarneiroTech.Services } } - return DefaultLanguage; + return string.Empty; } } } diff --git a/Views/Cases/Details.cshtml b/Views/Cases/Details.cshtml index 840d47c..7e39f00 100644 --- a/Views/Cases/Details.cshtml +++ b/Views/Cases/Details.cshtml @@ -1,4 +1,7 @@ @model CarneiroTech.Models.CaseModel +@{ + var lang = LanguageService.GetCurrentLanguage(Context); +}
@@ -14,25 +17,29 @@ @if (!string.IsNullOrEmpty(Model.Metadata.Client)) {
- Cliente: @Model.Metadata.Client + + @SiteStrings.Get("case.client", lang): @Model.Metadata.Client
} - @if (!string.IsNullOrEmpty(Model.Metadata.Industry)) + @if (!string.IsNullOrEmpty(Model.Metadata.DeviceModel)) {
- Indústria: @Model.Metadata.Industry + + @SiteStrings.Get("case.device", lang): @Model.Metadata.DeviceModel
} - @if (!string.IsNullOrEmpty(Model.Metadata.Timeline)) + @if (!string.IsNullOrEmpty(Model.Metadata.Category)) {
- Timeline: @Model.Metadata.Timeline + + @SiteStrings.Get("case.category", lang): @Model.Metadata.Category
} - @if (!string.IsNullOrEmpty(Model.Metadata.Role)) + @if (Model.Metadata.EstimatedSavings > 0) {
- Meu Role: @Model.Metadata.Role + + @SiteStrings.Get("case.savings", lang): R$ @Model.Metadata.EstimatedSavings.ToString("N2")
} @@ -63,10 +70,10 @@ diff --git a/Views/Cases/Index.cshtml b/Views/Cases/Index.cshtml index 4f7782c..cec6212 100644 --- a/Views/Cases/Index.cshtml +++ b/Views/Cases/Index.cshtml @@ -1,5 +1,6 @@ @model List @{ + var lang = LanguageService.GetCurrentLanguage(Context); var selectedTag = ViewData["SelectedTag"] as string; var allTags = ViewBag.AllTags as List ?? new List(); } @@ -7,8 +8,8 @@
-
Portfolio
-
Nossos Cases
+
@SiteStrings.Get("cases.header.subtitle", lang)
+
@SiteStrings.Get("cases.header.title", lang)
@@ -20,9 +21,9 @@ {
-
Filtrar por tecnologia:
+
@SiteStrings.Get("cases.filter.title", lang)
- Todos + @SiteStrings.Get("cases.filter.all", lang) @foreach (var tag in allTags) { @tag @@ -40,29 +41,43 @@ {
-
+

@caseItem.Metadata.Summary

- @if (!string.IsNullOrEmpty(caseItem.Metadata.Image)) + @{ + var imgPath = !string.IsNullOrEmpty(caseItem.Metadata.Thumbnail) ? caseItem.Metadata.Thumbnail : caseItem.Metadata.Image; + } + @if (!string.IsNullOrEmpty(imgPath)) { - @caseItem.Metadata.Title + @caseItem.Metadata.Title } else { -
+
+ +
}
-
-
@caseItem.Metadata.Title
-
@caseItem.Metadata.Industry
+
+
@caseItem.Metadata.Category
+
@caseItem.Metadata.Title
+
+ @caseItem.Metadata.DeviceModel +
+ +
+ @SiteStrings.Get("case.savings", lang) estimada + R$ @caseItem.Metadata.EstimatedSavings.ToString("N2") +
+
@foreach (var tag in caseItem.Metadata.Tags.Take(3)) { - @tag + @tag }
@@ -76,14 +91,14 @@

@if (!string.IsNullOrEmpty(selectedTag)) { - Nenhum case encontrado para a tag "@selectedTag". + @string.Format(SiteStrings.Get("cases.empty.tag", lang), selectedTag) } else { - Em breve, novos cases serão adicionados. + @SiteStrings.Get("cases.empty.all", lang) }

- Ver Todos os Cases + @SiteStrings.Get("cases.viewall", lang)
}
diff --git a/Views/Home/Index.cshtml b/Views/Home/Index.cshtml index 4ff9a49..b6d3ea4 100644 --- a/Views/Home/Index.cshtml +++ b/Views/Home/Index.cshtml @@ -1,14 +1,18 @@ +@using System.Text.Json @model List @{ - ViewData["Title"] = ViewData["Title"] ?? "Carneiro Tech - Solution Design & Technical Consulting"; + ViewData["Title"] = "CarneiroTech SBC - Assistência Técnica Premium e Upgrades"; + ViewData["Description"] = "Não descarte seu notebook. Upgrades de performance e resgate de carcaça em São Bernardo do Campo. Atendimento exclusivo no Golden Square Shopping."; }
-
Solution Design & Technical Consulting
-
Conectando Negócio e Tecnologia
- Saiba Mais +
Assistência Técnica Premium em SBC
+
+
Não descarte seu notebook.
+
Faça um Upgrade de Elite!
+ Agendar no Golden Square
@@ -16,33 +20,33 @@
-

Serviços

-

20+ anos conectando negócio e tecnologia

+

Nossas Especialidades

+

Tratamos seu equipamento como uma peça de engenharia, não apenas um eletrodoméstico.

- + -

Solution Design

-

Desenho de soluções técnicas que conectam objetivos de negócio com arquitetura de sistemas. Experiência com integrações SAP, arquiteturas enterprise e modernização de legados.

+

Upgrade de Performance

+

Transformamos máquinas lentas em foguetes com SSDs NVMe e memórias de alta velocidade. Performance superior a um PC novo por uma fração do preço.

- + -

Technical Consulting

-

Assessoria técnica para tomada de decisão: definição de MVP, priorização de backlog, análise de viabilidade técnica e due diligence de produtos digitais.

+

Limpeza Técnica & Térmica

+

Troca de pasta térmica industrial e remoção de poeira. Evitamos que seu processador queime por superaquecimento e garantimos silêncio total.

- + -

Technical Proposals

-

Elaboração de propostas técnicas detalhadas: estimativas, arquitetura, tecnologias, riscos e roadmap de implementação para projetos complexos.

+

Resgate de Carcaça

+

Reparo profissional de dobradiças e plásticos quebrados com resina industrial. Recuperamos o que outras assistências mandam para o lixo.

@@ -52,109 +56,137 @@
-

Cases

-

Projetos que transformaram negócios

+

Provas de Ressurreição

+

Veja como economizamos milhares de reais para nossos clientes de São Bernardo do Campo.

+ + @if (Model != null && Model.Any(m => m.Metadata.Slug.Contains("vostro"))) + { + var featuredCase = Model.First(m => m.Metadata.Slug.Contains("vostro")); +
+
+
+
+
+ Resultado do Resgate +
+
+
+
DESTAQUE: RESGATE EM SBC
+

@featuredCase.Metadata.Title

+

@featuredCase.Metadata.Summary

+
+ Economia Real: R$ @featuredCase.Metadata.EstimatedSavings.ToString("N2") +
+ Ver como foi feito +
+
+
+
+
+
+ } +
@if (Model != null && Model.Any()) { - foreach (var caseItem in Model.Take(6)) + foreach (var caseItem in Model.Where(m => !m.Metadata.Slug.Contains("vostro")).Take(3)) {
- -
+
-
-

@caseItem.Metadata.Summary

-
+
- @if (!string.IsNullOrEmpty(caseItem.Metadata.Image)) - { - @caseItem.Metadata.Title - } - else - { -
+ @{ + var imgPath = !string.IsNullOrEmpty(caseItem.Metadata.Thumbnail) ? caseItem.Metadata.Thumbnail : caseItem.Metadata.Image; } + ...
-
-
@caseItem.Metadata.Title
-
@caseItem.Metadata.Industry
+
+
@caseItem.Metadata.Title
+
@caseItem.Metadata.Category
+
Economia: R$ @caseItem.Metadata.EstimatedSavings.ToString("N2")
} } - else - { -
-

Em breve, novos cases serão adicionados.

-
- } -
-
- +
-

Sobre

-

Ricardo Carneiro - Carneiro Tech

+

Como Funciona nosso Atendimento

+

Transparência total e engenharia de precisão para seu equipamento.

+ +
+ Agilidade: Diagnóstico e orçamento possíveis em até 24 horas (dependendo da complexidade do caso). +
  • -
    +
    -

    2000-2005

    -

    Início da Jornada

    +

    Passo 1

    +

    Agendamento no Golden Square

    -

    Primeiros passos na tecnologia: desenvolvimento web, banco de dados e sistemas corporativos. Formação sólida em Engenharia de Software.

    +

    Você escolhe o melhor horário para deixar seu equipamento em nosso ponto de atendimento exclusivo no Golden Square Shopping, São Bernardo do Campo.

  • -
    +
    -

    2005-2015

    -

    Enterprise & SAP

    +

    Passo 2

    +

    Checklist e Laudo Digital

    -

    Especialização em integrações SAP e arquiteturas enterprise. Projetos em multinacionais nos setores healthcare, varejo e manufatura.

    +

    Sua segurança em primeiro lugar. Tiramos fotos e emitimos um laudo de recebimento detalhando o estado atual do seu equipamento na hora, enviado direto pro seu WhatsApp.

  • -
    +
    -

    2015-2020

    -

    Digital Transformation

    +

    Passo 3

    +

    Diagnóstico de Engenharia

    -

    Liderança técnica em transformação digital: cloud migration, modernização de legados e implementação de metodologias ágeis.

    +

    Nada de "tentativa e erro". Analisamos a eletrônica e o software do seu PC com ferramentas avançadas para identificar o problema real e a melhor solução com a maior agilidade possível.

  • -
    +
    -

    2020-Hoje

    -

    Consultoria Independente

    +

    Passo 4

    +

    Orçamento Transparente

    -

    Fundação da Carneiro Tech: consultoria especializada em Solution Design, Technical Proposals e Due Diligence para empresas de diversos portes.

    +

    Enviamos o diagnóstico e o orçamento detalhado via WhatsApp. Você só aprova se o valor fizer sentido para a vida útil do seu computador.

    +
    +
  • +
  • +
    +
    +
    +

    Passo 5

    +

    Entrega e Performance

    +
    +

    Você retira sua máquina ressurgida, limpa e com orientações técnicas de uso para garantir que ela dure por mais muitos anos com performance máxima.

  • -

    - Vamos +

    + PC Novo
    - Trabalhar + De
    - Juntos! + Novo!

  • @@ -166,13 +198,10 @@
    -

    Contato

    -

    Vamos conversar sobre seu desafio técnico

    +

    Agendar Avaliação

    +

    Atendimento exclusivo com hora marcada no Golden Square Shopping.

    - - -
    @@ -183,120 +212,61 @@
    - +
    - +
    -
    OU
    - - - Falar via WhatsApp + (11) 97682-2169
    - Resposta rápida e direta + Atendimento imediato via WhatsApp
+@section Scripts { +} diff --git a/Views/Home/Privacy.cshtml b/Views/Home/Privacy.cshtml index af4fb19..5dbdf51 100644 --- a/Views/Home/Privacy.cshtml +++ b/Views/Home/Privacy.cshtml @@ -1,6 +1,7 @@ @{ - ViewData["Title"] = "Privacy Policy"; + var lang = LanguageService.GetCurrentLanguage(Context); + ViewData["Title"] = SiteStrings.Get("privacy.title", lang); }

@ViewData["Title"]

-

Use this page to detail your site's privacy policy.

+

@SiteStrings.Get("privacy.body", lang)

diff --git a/Views/Shared/_Layout.cshtml b/Views/Shared/_Layout.cshtml index 0ba75d9..9bfe428 100644 --- a/Views/Shared/_Layout.cshtml +++ b/Views/Shared/_Layout.cshtml @@ -1,4 +1,7 @@ - +@{ + var lang = "pt"; // Site agora estritamente em PT-BR +} + @@ -7,8 +10,8 @@ @ViewData["Title"] - - + + @@ -16,15 +19,9 @@ - + - - - - - - @@ -38,37 +35,32 @@ - + - - - @@ -76,7 +68,7 @@
Carneiro Tech - Carneiro Tech + CarneiroTech @@ -110,12 +97,14 @@