add reviewed resume
This commit is contained in:
160
resume.tex
160
resume.tex
@@ -132,11 +132,11 @@
|
||||
\begin{multicols}{2}
|
||||
\small{
|
||||
\textbf{Languages}{: C/C++, Python, Java, Bash, TypeScript} \\
|
||||
\textbf{Frameworks}{: Hadoop, Apache Airflow, Kubernetes, Docker, OpenCV} \\
|
||||
\textbf{Databases/Lakehouses}{: Starburst, Iceberg, Hive,
|
||||
\textbf{Frameworks}{: Hadoop, Apache Airflow, Kubernetes, Docker} \\
|
||||
\textbf{Databases/Lakehouses}{: Starburst, Databricks, Iceberg, Hive,
|
||||
CockroachDB, OracleDB }\\
|
||||
\textbf{OS}{: NixOS, RHEL 8, Debian, Ubuntu Server, Windows} \\
|
||||
\textbf{Tools}{: LaTeX, Terraform, SQL, OpenGL}
|
||||
\textbf{Tools}{: LaTeX, Terraform, SQL, PyArrow, OpenGL}
|
||||
}
|
||||
|
||||
\columnbreak
|
||||
@@ -154,20 +154,27 @@
|
||||
{Software Engineer II}{Jan. 2025 -- Present}
|
||||
{JPMorgan Chase}{Jersey City, NJ}
|
||||
\resumeItemListStart
|
||||
\resumeItem{Created configurable data ingestion framework for
|
||||
automated data refinement and movement for hybrid data lake,
|
||||
responsible for 200+ dataset pipelines across 3 applications,
|
||||
CTAS materialization for 20+ reporting dashboards,
|
||||
data quality monitoring, and data validation}
|
||||
\resumeItem{Architected and implemented an orchestration system based
|
||||
on Apache Airflow for both event-based and SLA-based data ingestion}
|
||||
\resumeItem{Led weekly office hour sessions to demonstrate and assist
|
||||
with onboarding and configuring new datasets}
|
||||
\resumeItem{Trained a team of ten developers to support the custom
|
||||
frameworks for use across multiple applications}
|
||||
\resumeItem{Created Helm charts for making platform UI services
|
||||
available and configurable to four other platform teams and their applications,
|
||||
using a common service layer to allow for easy integration and maintenance}
|
||||
\resumeItem{Designed and deployed configurable data ingestion framework
|
||||
using Iceberg CTAS and time-travel for zero-outage updates,
|
||||
orchestrating 200+ refinement pipelines with automated data
|
||||
reconciliation across four zones (OLTP, raw, trusted, refined)}
|
||||
\resumeItem{Implemented PyArrow-based validation and dual-engine
|
||||
architecture supporting on-prem (Starburst) and off-prem (Databricks)
|
||||
reporting for 50+ downstream teams}
|
||||
\resumeItem{Architected and implemented Apache Airflow orchestration
|
||||
supporting 1,000+ tasks per DAG with templated configuration-driven
|
||||
design, tiered pooling to prevent resource exhaustion, and automated
|
||||
partition registration in Trino for large Hive tables}
|
||||
\resumeItem{Led weekly office hours to help onboard new datasets and
|
||||
trained 10 developers to operate and extend the framework across
|
||||
multiple applications, reducing MTTR for incidents}
|
||||
\resumeItem{Led Kubernetes resource optimization across 30+ services in
|
||||
three applications, implementing best-effort QoS in dev and test
|
||||
environments while tuning production resources, achieving \$50k
|
||||
annual cost savings in reservations and usage}
|
||||
\resumeItem{Created reusable Helm charts and a shared service layer
|
||||
that enabled 4 platform teams to deploy and configure UI services
|
||||
more consistently}
|
||||
\resumeItemListEnd
|
||||
|
||||
%% create a new resume item below for a software engineering job at
|
||||
@@ -177,23 +184,24 @@ using a common service layer to allow for easy integration and maintenance}
|
||||
{Site Reliability Engineer}{Jul. 2022 -- Jan. 2025}
|
||||
{JPMorgan Chase}{Jersey City, NJ}
|
||||
\resumeItemListStart
|
||||
\resumeItem{Provided on-call support for 30 applications across multiple
|
||||
teams, including Permit to Deploy/Operate process, incident response, root cause analysis, and
|
||||
post-mortems}
|
||||
\resumeItem{SME for Hadoop data lake, providing additional support for
|
||||
Tableau dashboards, Kubernetes applications, Cloud Foundry
|
||||
applications, maintaining Dremio instance, AWS S3 compatible object store}
|
||||
\resumeItem{SME for Linux systems, Networking (firewalls, load
|
||||
balancers, etc.), Hadoop}
|
||||
\resumeItem{Led toil reduction and noisy alert reduction across our
|
||||
applications by 40\% through automated recovery, monitoring, and alerting}
|
||||
\resumeItem{Led onboarding and standardization efforts for
|
||||
observability tooling for all applications using Dynatrace and Splunk,
|
||||
increasing alert coverage for all 30 applications}
|
||||
\resumeItem{Fully automated disaster recovery procedures across a
|
||||
subset of our applications}
|
||||
\resumeItem{Automated copy, validation, and merging of data across
|
||||
Hadoop lakes (72 hour effort reduction)}
|
||||
\resumeItem{Owned production support for 30 applications across
|
||||
multiple teams, including deployment approvals, incident response,
|
||||
root cause analysis, and post-mortems}
|
||||
\resumeItem{Served as primary support engineer for a Hadoop-based data
|
||||
lake platform spanning Tableau, Kubernetes, Cloud Foundry, Dremio,
|
||||
and S3-compatible object storage}
|
||||
\resumeItem{Served as the team expert on Linux, networking, and
|
||||
Hadoop infrastructure supporting business-critical applications}
|
||||
\resumeItem{Reduced toil and noisy alerts by 40\% through automated
|
||||
recovery workflows and tighter monitoring and alerting controls}
|
||||
\resumeItem{Standardized Dynatrace and Splunk onboarding across 30
|
||||
applications, improving alert coverage and observability consistency}
|
||||
\resumeItem{Automated disaster recovery procedures for a subset of
|
||||
production applications, reducing manual failover steps}
|
||||
\resumeItem{Automated historical data reload workflows using backup
|
||||
cluster for reprocessing and merge back to primary Hive datasets,
|
||||
reducing 72 hours of manual effort to zero and enabling on-demand
|
||||
backfill capabilities}
|
||||
\resumeItemListEnd
|
||||
|
||||
\resumeSubheading
|
||||
@@ -201,11 +209,11 @@ Hadoop lakes (72 hour effort reduction)}
|
||||
{Stevens Institute of Technology}{(Remote) Hoboken, NJ}
|
||||
\resumeItemListStart
|
||||
\resumeItem{Led a team of student interns to develop
|
||||
\href{https://github.com/StevensDeptECE/GrailGUI}{\textbf{Grail}}, an
|
||||
\textbf{OpenGL}-based graphics API and browser engine}
|
||||
\resumeItem{Ported \textbf{C++} networking functionality on
|
||||
\textbf{Linux} to \textbf{Windows} using \textbf{Winsock}}
|
||||
\resumeItem{Added support for \textbf{ESRI Shapefiles} to draw and
|
||||
\href{https://github.com/StevensDeptECE/GrailGUI}{Grail}, an
|
||||
OpenGL-based graphics API and browser engine}
|
||||
\resumeItem{Ported C++ networking functionality on
|
||||
Linux to Windows using Winsock}
|
||||
\resumeItem{Added support for ESRI Shapefiles to draw and
|
||||
animate maps through rendering engine}
|
||||
\resumeItem{Improved XDL Type system, a custom standard similar to
|
||||
CORBA, to send and receive statically-typed data}
|
||||
@@ -231,10 +239,10 @@ CORBA, to send and receive statically-typed data}
|
||||
{Maritime Security Center}{Hoboken, NJ}
|
||||
\resumeItemListStart
|
||||
\resumeItem{Created an image classification system with
|
||||
\textbf{OpenCV} to filter out noise and detect buoys in a
|
||||
\textbf{ROS/Gazebo} simulation}
|
||||
OpenCV to filter out noise and detect buoys in a
|
||||
ROS/Gazebo simulation}
|
||||
\resumeItem{Added mapping functionality to plot obstacles onto a 2D
|
||||
map generated by \textbf{OctoMap}}
|
||||
map generated by OctoMap}
|
||||
\resumeItem{Optimized the image classification and mapping frameworks
|
||||
to improve reliability in navigation}
|
||||
\resumeItemListEnd
|
||||
@@ -256,42 +264,42 @@ to improve reliability in navigation}
|
||||
|
||||
% Certifications moved to Technical Skills & Certifications section above
|
||||
|
||||
\section{Projects}
|
||||
\resumeSubHeadingListStart
|
||||
\resumeProjectHeading{SwitchForward}{Jun. 2020 -- Aug. 2020}
|
||||
\resumeItemListStart
|
||||
\resumeItem{A \textbf{Python}-based Telegram bot to send stock
|
||||
updates for the Nintendo Switch during a supply shortage}
|
||||
\resumeItem{Used the Gmail API to receive and parse emails from a
|
||||
Google Group tracking Nintendo Switch stock}
|
||||
\resumeItem{Sent updates to a Telegram announcements channel used by
|
||||
\textbf{5-10} users}
|
||||
\resumeItemListEnd
|
||||
|
||||
\resumeProjectHeading{Autonomous Robot}{Aug. 2018 -- Dec. 2018}
|
||||
\resumeItemListStart
|
||||
\resumeItem{An \textbf{Arduino}-based robot designed to navigate
|
||||
through a maze}
|
||||
\resumeItem{Primarily worked on path planning and control in a dynamic setting}
|
||||
\resumeItem{Implemented basic error-correction to account for drift
|
||||
during navigation}
|
||||
\resumeItemListEnd
|
||||
|
||||
% Removing this project as it is not as relevant to the software
|
||||
% engineering positions I am applying for
|
||||
% and the work was not as technical as my other experiences
|
||||
%\resumeProjectHeading{Cost-effective Road Anomaly Locator}{Sep. 2016
|
||||
% -- May. 2018}
|
||||
%\section{Projects}
|
||||
%\resumeSubHeadingListStart
|
||||
%\resumeProjectHeading{SwitchForward}{Jun. 2020 -- Aug. 2020}
|
||||
%\resumeItemListStart
|
||||
%\resumeItem{Designed an affordable methodology for implementing and
|
||||
% monitoring a unit to detect potholes and other damaging road
|
||||
% anomalies with \textbf{65\%} accuracy ($p<0.05$)}
|
||||
%\resumeItem{Assembled and tested units to collect data and
|
||||
% demonstrate effectiveness of the unit}
|
||||
%\resumeItem{Ran several tests and did statistical analysis on the
|
||||
% resulting data}
|
||||
%\resumeItem{A \textbf{Python}-based Telegram bot to send stock
|
||||
% updates for the Nintendo Switch during a supply shortage}
|
||||
%\resumeItem{Used the Gmail API to receive and parse emails from a
|
||||
% Google Group tracking Nintendo Switch stock}
|
||||
%\resumeItem{Sent updates to a Telegram announcements channel used by
|
||||
% \textbf{5-10} users}
|
||||
%\resumeItemListEnd
|
||||
%
|
||||
%\resumeProjectHeading{Autonomous Robot}{Aug. 2018 -- Dec. 2018}
|
||||
%\resumeItemListStart
|
||||
%\resumeItem{An \textbf{Arduino}-based robot designed to navigate
|
||||
% through a maze}
|
||||
%\resumeItem{Primarily worked on path planning and control in a dynamic setting}
|
||||
%\resumeItem{Implemented basic error-correction to account for drift
|
||||
% during navigation}
|
||||
%\resumeItemListEnd
|
||||
%
|
||||
%% Removing this project as it is not as relevant to the software
|
||||
%% engineering positions I am applying for
|
||||
%% and the work was not as technical as my other experiences
|
||||
%%\resumeProjectHeading{Cost-effective Road Anomaly Locator}{Sep. 2016
|
||||
%% -- May. 2018}
|
||||
%%\resumeItemListStart
|
||||
%%\resumeItem{Designed an affordable methodology for implementing and
|
||||
%% monitoring a unit to detect potholes and other damaging road
|
||||
%% anomalies with \textbf{65\%} accuracy ($p<0.05$)}
|
||||
%%\resumeItem{Assembled and tested units to collect data and
|
||||
%% demonstrate effectiveness of the unit}
|
||||
%%\resumeItem{Ran several tests and did statistical analysis on the
|
||||
%% resulting data}
|
||||
%%\resumeItemListEnd
|
||||
|
||||
\resumeSubHeadingListEnd
|
||||
%\resumeSubHeadingListEnd
|
||||
|
||||
\end{document}
|
||||
|
||||
Reference in New Issue
Block a user