expand role details, add metrics
This commit is contained in:
31
resume.tex
31
resume.tex
@@ -132,7 +132,7 @@
|
||||
\begin{multicols}{2}
|
||||
\small{
|
||||
\textbf{Languages}{: C/C++, Python, Java, Bash, TypeScript} \\
|
||||
\textbf{Frameworks}{: Hadoop, Airflow, Kubernetes, Docker, OpenCV} \\
|
||||
\textbf{Frameworks}{: Hadoop, Apache Airflow, Kubernetes, Docker, OpenCV} \\
|
||||
\textbf{Databases/Lakehouses}{: Starburst, Iceberg, Hive,
|
||||
CockroachDB, OracleDB}\\
|
||||
\textbf{OS}{: NixOS, RHEL 8, Debian, Ubuntu Server, Windows} \\
|
||||
@@ -155,15 +155,19 @@
|
||||
{JPMorgan Chase}{Jersey City, NJ}
|
||||
\resumeItemListStart
|
||||
\resumeItem{Created configurable data ingestion framework for
|
||||
automated data refinement and movement for hybrid data lake}
|
||||
automated data refinement and movement for a hybrid data lake,
|
||||
responsible for 200+ dataset pipelines across 3 applications,
|
||||
CTAS materialization for 20+ reporting dashboards,
|
||||
data quality monitoring, and data validation}
|
||||
\resumeItem{Architected and implemented an orchestration system based
|
||||
on Apache airflow for both event-based and SLA-based data ingestion}
|
||||
on Apache Airflow for both event-based and SLA-based data ingestion}
|
||||
\resumeItem{Led weekly office hour sessions to demonstrate and assist
|
||||
with onboarding and configuring new datasets}
|
||||
\resumeItem{Trained a team of developers to support the custom
|
||||
\resumeItem{Trained a team of ten developers to support the custom
|
||||
frameworks for use across multiple applications}
|
||||
\resumeItem{Created helm charts for making platform services
|
||||
available and configurable to other teams }
|
||||
\resumeItem{Created Helm charts for making platform UI services
|
||||
available and configurable to four other platform teams and their applications,
|
||||
using a common service layer to allow for easy integration and maintenance}
|
||||
\resumeItemListEnd
|
||||
|
||||
%% create a new resume item below for a software engineering job at
|
||||
@@ -173,19 +177,22 @@ available and configurable to other teams }
|
||||
{Site Reliability Engineer}{Jul. 2022 -- Jan. 2025}
|
||||
{JPMorgan Chase}{Jersey City, NJ}
|
||||
\resumeItemListStart
|
||||
\resumeItem{Supported 30 applications}
|
||||
\resumeItem{Provided on-call support for 30 applications across multiple
|
||||
teams, including Permit to Deploy/Operate process, incident response, root cause analysis, and
|
||||
post-mortems}
|
||||
\resumeItem{SME for Hadoop data lake, providing additional support for
|
||||
Tableau dashboards, Kubernetes applications, cloud foundry
|
||||
applications, maintaining dremio instance, S3 compatible object store}
|
||||
Tableau dashboards, Kubernetes applications, Cloud Foundry
|
||||
applications, maintaining Dremio instance, AWS S3 compatible object store}
|
||||
\resumeItem{SME for Linux systems, Networking (firewalls, load
|
||||
balancers, etc.), Hadoop}
|
||||
\resumeItem{Led toil reduction and noisy alert reduction across our
|
||||
applications}
|
||||
applications by 40\% through automated recovery, monitoring, and alerting}
|
||||
\resumeItem{Led onboarding and standardization efforts for
|
||||
observability tooling}
|
||||
observability tooling for all applications using Dynatrace and Splunk,
|
||||
increasing alert coverage for all 30 applications}
|
||||
\resumeItem{Fully automated disaster recovery procedures across a
|
||||
subset of our applications}
|
||||
\resumeItem{Automated copy, validation of data, and merging of across
|
||||
\resumeItem{Automated copy, validation, and merging of data across
|
||||
Hadoop lakes (72 hour effort reduction)}
|
||||
\resumeItemListEnd
|
||||
|
||||
|
||||
Reference in New Issue
Block a user