1st version

This commit is contained in:
STEINNI
2025-11-04 07:36:02 +00:00
commit 2723c34a5d
177 changed files with 53167 additions and 0 deletions

View File

@@ -0,0 +1 @@
,nike,nikedesk,30.09.2025 09:24,file:///home/nike/.config/libreoffice/4;

375
Jira_helper/TS2509.csv Normal file
View File

@@ -0,0 +1,375 @@
"Date","Qty","Actions","Remarks",,,,
01/12/24,"'(0.5)","POC of writing file from MIDAS container to my S3 frontend + first code structure of PDF Maker",,15,9945,"12033,45",
02/12/24,1,".5H Sprint follow-up (SMEIMKT-8352) + 7.5H SMEIMKT-8268 (mailing sheet page - latest activity, dups, import & delete import)",,,"4972,5","2088,45",
03/12/24,1,"templitor Lambda MySQL refactoring (smeimkt-8112)",,,,,
04/12/24,1,"SMEIMKT-8268 (mailing sheet page - Dups viewer + mappings refactor) + mappings refactor in lambda (SMEIMKT-8267)",,,,,
05/12/24,1,"SMEIMKT-8268 (mailing sheet page - finish mappings refactor + debug sources) + 1.5H Meeting Event brainstorming (SMEIMKT-8168)",,,,,
06/12/24,1,"SMEIMKT-8268 (mailing sheet page - review) + 1.5H Meeting Magali Onboard & pair-debugging",,,,,
07/12/24,"(0,5)","SMEIMKT-8268 (mailing sheet page improve async-loader + debugging Review: Rejection reason)",,,,,
08/12/24,,,,,,,
09/12/24,1,"2H Meeting events (SMEIMKT-8168) + 2H pair-prog Mag. Templitor service + mailing sheet page improve async-loader + debugging Review: Rejection reason (SMEIMKT-8268)",,,,,
10/12/24,1,"0,5H Help Mag. + 0,5H Meeting Val followup + Finished Review: comments + better rights on WF (SMEIMKT-8268)",,,,,
11/12/24,1,"PDF Maker + CNS lambda optimization & switch to MySQL",,,,,
12/12/24,1,"CNS (switch to MySQL + Store CNS_ID) + 0.25 Meeting Val (planning Mailing) + 1H meeting Mag & Axel: PDFMaker & templates + Added B64 tokens in PDFMaker & into CNS Lambda",,,,,
13/12/24,1,"CNS (switch to MySQL + Store CNS_ID + decrypt CNS-ID from email) + 5 2xMeeting Val (planning Mailing) + 0.5H meeting Mag, Mikekel & Andreas PDFMaker planning ",,,,,
14/12/24,"(0,5)","Mailing: Corrected some workflow issues, added the “cancel mailing”, improved async-loader(now re-entrant) + Last-minute R&D re-archi scheduler & injection with large recipients count",,,,,
15/12/24,"(0,5)","Last-minute R&D re-archi scheduler & injection with large recipients count + debug & improve Schedule + scheduled mailing (FE & MT)",,,,,
16/12/24,1,"Mailing deployment with Olivier + fixed PDF-Maker UTF8 issue",,,,,
17/12/24,1,"Mailing deployment with Olivier + Chasing MIDAS II IPV6 issue with Stijn",,,,,
18/12/24,1,"Prepared MySQL to SQS lambda+ Meeting synchro with Mike (SMEIMKT-8421)",,,,,
19/12/24,1,"Recup 01/12 & 07/12",,,,,
20/12/24,1,"Recup 14/12 & 15/12",,,,,
21/12/24,,,,,,,
22/12/24,,,,,,,
23/12/24,0,,,,,,
24/12/24,0,,,,,,
25/12/24,0,,,,,,
26/12/24,0,,,,,,
27/12/24,0,,,,,,
28/12/24,,,,,,,
29/12/24,,,,,,,
30/12/24,0,,,,,,
31/12/24,0,,,,,,
01/01/25,0,,,20,13260,"16044,6",
02/01/25,0,,,,6630,"2784,6",
03/01/25,0,,,,,,
04/01/25,,,,,,,
05/01/25,,,,,,,
06/01/25,1,"Deploy & debug Mailings in test(SMEIMKT8231) + debug PDF Maker SMEIMKT-7979",,,,,
07/01/25,1,"Deploy & debug Mailings in test, acc & prod + Demo (SMEIMKT8231)",,,,,
08/01/25,1,"Debug Mailings (mapping & non-mail templates) + Pairprog Mag on templitor (send test mail + new helper + talk about security groups on folders) SMEIMKT-7874",,,,,
09/01/25,1,"1,5H Meeting Integration synchrone POD (SMEIMKT-8448) + Develop Scheduling lambda (SMEIMKT8231)",,,,,
10/01/25,1,"Deploy scheduling lambda + Debug & improve pdfMaker (Special characters, B64 fields) SMEIMKT-7979",,,,,
11/01/25,,,,,,,
12/01/25,,,,,,,
13/01/25,1,"Document Mailing + 3h Meeting synchro Mike (Debug baseroutes & services + activeAttributes + Versioning EIC-Components)",,,,,
14/01/25,1,"Meeting prepa BXL-get-together (1.25H - SMEIMKT-8462) + Document Mailing + 3h Onboarding Steph about the BUS (gateways functionning, code, and the Request-reply via Promise pattern). SMEIMKT-8222",,,,,
15/01/25,1,"Improve Midas Notifications (requestType + requestUUID - check the whole chain until CNS Lambda) SMEIMKT-8081",,,,,
16/01/25,1,"BusConsole SMEIMKT-8510",,,,,
17/01/25,1,"BusConsole SMEIMKT-8510",,,,,
18/01/25,"1 Day (offered)","AI POC : Moderation & Coverage via Midas",,,,,
19/01/25,"1 Day","AI POC : Moderation & Coverage via Midas",,,,,
20/01/25,1,"BusConsole SMEIMKT-8510",,,,,
21/01/25,1,"BusConsole SMEIMKT-8510 + 2.5H pair-debug with Mag. SMEIMKT-7874",,,,,
22/01/25,1,"BXL get-together",,,,,
23/01/25,1,"BXL get-together",,,,,
24/01/25,1,"BXL get-together",,,,,
25/01/25,,,,,,,
26/01/25,,,,,,,
27/01/25,1,"Récup 19/01",,,,,
28/01/25,1,"Adding Queue to Midas + Research about asyncAPI & exchanges with Stijn about it SMEIMKT-8390",,,,,
29/01/25,1,"Meeting Mikelkel (deployment PDF Maker + business & AI strategy) + Meeting contact Mgt status (SMEIMKT-7665) + debugging Midas queue SMEIMKT-7979 + debugging Midas notifs SMEIMKT-8081 + 1.5H pair-debug with Mag. SMEIMKT-7874",,,,,
30/01/25,1,"Debugging Midas notifs SMEIMKT-8081 + Full stack Release + 0.5H pair-debug with Mag.",,,,,
31/01/25,1,"Started UserRoles plugin SMEIMKT-8256 + Meeting Axel about UserRoles contract & roles metas & Responsability API + 0.5H Pair debug with Mag.SMEIMKT-7874 + Meeting Jack & Steph about POD connection",,,,,
01/02/25,"0.5 (offered)","(WE+most evenings) Bus console",,17,11271,"13637,91",
02/02/25,"0.5 (offered)","(WE+most evenings) Bus console",,,"5635,5","2366,91",
03/02/25,1,"Midas UserRoles mgt (user-roles + parallelizing queue processing) SMEIMKT-8256 + 1h Meeting Activity/Contact management status (SMEIMKT-7665) ",,3,2019,"2442,99",
04/02/25,1,"Midas UserRoles mgt (backend-roles + meta + upgrade local MariaDB to 11.5) SMEIMKT-8256 + Meeting Axel for contract on backend-roles) + MeetingMike synchro & UserMgt (Forcing session invalidation or not)",,,"1009,5","423,99",
05/02/25,1,"1h Meeting Activity/Contact management status (SMEIMKT-7665) + Midas UserRoles mgt (backend-roles + meta + Replaced MariaDB By MySQL8 to be 100% AWS compatible) SMEIMKT-8256",,,13290,"16080,9",
06/02/25,1,"Midas deploy & debug of refactored version with userRoles, Queuing + debug problem with got-cloning plugins SMEIMKT-8256",,,6645,"2790,9",
07/02/25,1,"Midas deploy & debug with Stijn (fixed the PDF error) SMEIMKT-8256",,,,,
08/02/25,"0.5 (offered)","(WE+most evenings) Bus console",,,,,
09/02/25,"0.5 (offered)","(WE+most evenings) Bus console",,,,,
10/02/25,1,"0.5 Meeting Activity/Contact management status (SMEIMKT-7665) + Midas (adapted packets to be BW-compatible & redeployed to DEV) SMEIMKT-8256 + 1h Document apiResponse in MT-club minutes + 2h fix SMEIMKT-8555 & explain to Mag. the issue",,,,,
11/02/25,1,"Fixed CreateUserRole packet format after testing in dev, then Released & tested OK Midas UserUpdate from TEST to PROD. (SMEIMKT-8256) + Helped Val testing (Roles) + Fixed small FE issue in mailing (long template names) SMEIMKT-8555 + Added Unschedule to FE (SMEIMKT-8302) ",,,,,
12/02/25,1,"Worked on Delete Mailing (SMEIMKT-8302) + 1h Meeting Activity/Contact management status (SMEIMKT-7665) + 3h Meeting Mike Black-op User Mgt",,,,,
13/02/25,1,"2h Document mailing-service in Swagger (SMEIMKT-8389) + 1h Synchro with Mike about Contact-Mger (SMEIMKT-7665) + Meeting Marc & Mag. about activityMgr, Mailing & contactMgr (2h - SMEIMKT-7665) + 1.5H Meeting Val. stratégie & planning Events (SMEIMKT-7665) + 0.5H Fixed issue seen during Demo to Marc (SMEIMKT-8302)",,,,,
14/02/25,1,"0.25h Meeting MalleJack (lessons learned on MT-issue for ICMP) + .5h help Mag. with env variables in MT (SMEIMKT-7874) + 5h Document mailing-service in Swagger (SMEIMKT-8389) + 2.25 Fixed template browser Path bug + Finished delete-mailing (SMEIMKT-8302)",,,,,
15/02/25,"0.5 (offered)","(WE+most evenings) Bus console",,,,,
16/02/25,"0.5 (offered)","(WE+most evenings) Bus console + Debug Midas dbl-subscribe after config reload (SMEIMKT-8256)",,,,,
17/02/25,1,"Updated Cookie-cutter, recent libs, latest MT-patterns + started with contactMgr lambda + rebuilt mtBus package, to get lambda to communicate with httpGateway + Started kicking out commonlib (OPTIONS handling method) + 1H Meeting Activity/Contact management status (SMEIMKT-7665) + .5h Validated Midas roles services in Prod (SMEIMKT-8256)",,,,,
18/02/25,1,"Rebuilding a session module for ContactMgr lambda (SMEIMKT-7727)",,,,,
19/02/25,1,"0.5H Meeting Activity/Contact management status (SMEIMKT-7665) + .5h Meeting Val. coord activity/contactMgt + Rebuilding a session module for ContactMgr lambda (SMEIMKT-7727)",,,,,
20/02/25,1,"Released & distributed BusConsole 1.0.2 + Removed Bus in platform-crtl from FE + 2h Meeting Stijn about origins, Backstage & busconsole + .5h Meeting Olivier about CORS origins + Improving session module (allowedOrigins) for ContactMgr lambda (SMEIMKT-7986)",,,,,
21/02/25,1,"Rewrite Marklogic ORM package in ContactMgr lambda (SMEIMKT-7986) +0,5 meeting MM about ML-ORM",,,,,
22/02/25,,,,,,,
23/02/25,,,,,,,
24/02/25,1,"Rewrite Marklogic ORM package in ContactMgr lambda (SMEIMKT-7986) + 1,5H Meeting Contact management status (SMEIMKT-7665) + Meeting MM, 1H meeting Axel about ML-ORM",,,,,
25/02/25,1,"Debug & test Marklogic ORM package in ContactMgr lambda (SMEIMKT-7986) + 1h meeting Marc about request-API",,,,,
26/02/25,1,"1,5H Meeting Contact management status + 0,5H Meeting Marc contract MT-BE + Connect & test MT to new request orm (SMEIMKT-7665) + 1H Optimize & debug templitor lambda with Mag. (SMEIMKT-7874)",,,,,
27/02/25,1," 1h Help Mag. With 405 Error from APIGW SMEIMKT-7874 + 3.5H Fix Bus error in prod (httpGW big packets) & fight with broken pipes without Stijn or Olivier (F***) + fight with 7777 because Microsoft account broken + 3.5h n ContactMgr lambda (SMEIMKT-7986) ",,,,,
28/02/25,1," 2h Help Mag. With 405 Error from APIGW SMEIMKT-7874 + 1.5H Meeting Mikelkel coord (planning & tasks for Mag.) 4.5h n ContactMgr lambda & fix apiResponse on OPTION issue (SMEIMKT-7986) ",,,,,
01/03/25,"0.5 (recup)","FE fetch component + improve MT-side (SMEIMKT-7986)",,16,10768,"13029,28",
02/03/25,"0.5 (recup)","FE fetch component + improve MT-side (SMEIMKT-7986)",,,5384,"2261,28",
03/03/25,1,"FE fetch component + improve MT-side (SMEIMKT-7986) + 1,5H Status Meeting Contact management status (SMEIMKT-7665)",,,,,
04/03/25,1,"FE fetch component + improve MT-side (SMEIMKT-7986) + 2,5H Demo & Meeting Contact management status (SMEIMKT-7665)",,,,,
05/03/25,1,"[3h Invoicing & Helpdesk about Welcome migration] FE fetch component + migrate & debug MT-side to cope with all improvements made by Marc on ML-side (SMEIMKT-7986) + 2h Meeting Mike about Front-End weird bug about “null in components”",,,,,
06/03/25,1,"2,5H meeting Mag + Meeting Mag & Siggy about archi & planning to connect Drupal webforms to ML via Bus + 2h with Mike: Front-End weird bug about “null in components” finally Infra bug : Stijn Cognito hack + 3.5h improve FE for mark changes (SMEIMKT-7986)",,,,,
07/03/25,1,"4h improve FE for mark changes (SMEIMKT-7986) + 4H Bus console (redpill) ",,,,,
08/03/25,,,,,,,
09/03/25,,,,,,,
10/03/25,"OFF",,,,,,
11/03/25,"OFF",,,,,,
12/03/25,"OFF",,,,,,
13/03/25,"OFF",,,,,,
14/03/25,"OFF",,,,,,
15/03/25,,,,,,,
16/03/25,,,,,,,
17/03/25,1,"Recup 01.03 & 02/03",,,,,
18/03/25,1,"4H Bus console (redpill) + admin (helpdesk & re-invoice) + Read&comment Incident report - SMED-2020/11/18 ",,,,,
19/03/25,1,"6H Debug 401 not triggering re-login (SMEIMKT-8658) + 2H Bus console (redpill) + Meeting Yassen & Fabien about NET1 lost access + 2H Meeting Olivier about Lambda for bounce mails",,,,,
20/03/25,1,"1.5H Meeting MM middle-tier improvement (code review & session debug) + .5H Meeting Val coordination + 1H Siggy DrupalGateway + 3H Bus console (redpill OK) + 2H Debug the Session issue with Postman with MM",,,,,
21/03/25,1,"3H Code review & debug all new-style Lambda libs (Debugged the Session issue with Postman) + 1H Install & explain Bus console to MM + 4H BUS-POD connection pair-prog with MM (debugged the SecretMgr cnx, from the container)",,,,,
22/03/25,,,,,,,
23/03/25,,,,,,,
24/03/25,1,"1H Meeting Activity/Contact management status (SMEIMKT-7665) + 0.5H Meeting Steph & Val: Tokens APIGW Europa pour bus-POD + 0.5H Meeting Steph install Busconsole + 1H fix secretManager issue on httpGateway with Olivier + ",,,,,
25/03/25,1,"1H Meeting PDF issue (SMEIMKT-8680) + 1H Meeting Mike (resynchro UI develop branch status) + 2H httpGateway deployment issue in dev + 2H review & amend Stephane Doc on Bus channels conventions + 2H adding REDIS INFO to WSS Gateway",,,,,
26/03/25,1,"1.5H Meeting Activity/Contact management status (SMEIMKT-7665) +1H Meeting about webforms + 5.5h Pietreechart r&d",,,,,
27/03/25,1,"7h pietreechart for criteria eval (SMEIMKT-7665) + 1H Meeting Siggy about Zammad upgrade issues",,,,,
28/03/25,1,"8h pietreechart for criteria eval + reworking FE & Lambda to latest changes (SMEIMKT-7665) ",,,,,
29/03/25,,,,,,,
30/03/25,,,,,,,
31/03/25,1," 1H Meeting Activity/Contact management status + 1H Meeting Marc debugging MT-ORM crashes after changes (SMEIMKT-7665) + 1H Meeting Olivier Bounce mechanism (SMEIMKT-7931) + 2H Meeting Stijn New documentation system for APIs + testing by redeploying mailing service (SMEIMKT-8710) + Debug with Marc & Axel QueryCanvas issues",,,,,
01/04/25,1,"2.5H Meeting Bus channels (SMEIMKT-7196) + 2H Meeting Bus Drupal (SMEIMKT-7196) + 1H POC double token for POD (SMEIMKT-8325) + 2.5H debug Queries (SMEIMKT-7665)",,15,10095,"12214,95",
02/04/25,1,"1H Meeting Activity/Contact management status 4.5H Meeting Mark & debug Queries (SMEIMKT-7665) + 1H Pair-prog Jack Bus-POD (SMEIMKT-8325) + 1.5H Meeting FFS-securité (SMEIMKT-8734)",,,"5047,5","2119,95",
03/04/25,1,"0.5H Meeting Magali FFS security + 0.5H discuss security Marc (SMEIMKT-8734) + 6.5H Building FE filterspie (legend) + cleanup Insomnium requests to give to Marc, and debug some + started build filters-analyse into Lambda (SMEIMKT-7665) + 1H Trying & modding Stijn Python script for openAPI into Backstage (SMEIMKT-8710)",,,,,
04/04/25,1,"1.5H discuss security Magali (SMEIMKT-8628) + 1H DB & document review (SMEIMKT-8734) + 1.5H Chatxchages Steph + HttpGateway code review + 0.5H call Steph Bus-POD (SMEIMKT-7196) + 4H Code review New documentation system for APIs (SMEIMKT-8710)",,,,,
05/04/25,,,,,,,
06/04/25,,,,,,,
07/04/25,1,"1H Meeting Activity/Contact management status (SMEIMKT-7665) +1H Meeting Marc Dynamic Forms & ContactMGR count & debug + 2H Merge & review HTTPGW code (SMEIMKT-8325)",,,,,
08/04/25,1,"7,5H HTTPGW POD (Pair prog with MM + debug & add features + token event & requests (SMEIMKT-8325) + 0,5H Meeting Marc AccessRights to QueryCanvas",,,,,
09/04/25,1,"1H Meeting Activity/Contact management status (SMEIMKT-7665) + 0,5H Meeting Siggy about Zammad bug (EICINFRA-1272) + Pair-prog with MM on HttpGateway POD + alone (SMEIMKT-8325) + 0,5H meeting Marc about latest features in contactMGR (SMEIMKT-7665)",,,,,
10/04/25,1,"HTTPGW POD (Pair prog with MM 4H + alone : debug) (SMEIMKT-8325) + 1H meeting archi w/ Val.",,,,,
11/04/25,1,"Adapt contactMgr lambda to new version of ORM (SMEIMKT-7665) + (Administrative emergency: Progress report for end-of-last-contract) + Activity Mgt user group Meeting",,,,,
12/04/25,,,,,,,
13/04/25,,,,,,,
14/04/25,0,,,,,,
15/04/25,0,,,,,,
16/04/25,0,,,,,,
17/04/25,0,,,,,,
18/04/25,0,,,,,,
19/04/25,,,,,,,
20/04/25,,,,,,,
21/04/25,0,,,,,,
22/04/25,0,,,,,,
23/04/25,1,"1H Meeting Activity/Contact management status (SMEIMKT-7665) + 3H testing & crashing Bounce Lambda + 1,5H Debugging Marklogic ContactMgr with Marc + 0,5H Meeting Bus (MM, FLE, AB, SS) + 2H admin-shit ",,,,,
24/04/25,1,"5H Pair-prog with MM on httpGateway : deployed final token version in DEV & tested OK (SMEIMKT-8325) + 3H Help Magali refactor her security Lambda in new lambda-style (SMEIMKT-8628)",,,,,
25/04/25,1,"2H Checked httpGW with MM + 1,5H Meeting Mag FFS-security (review DB struct & archi) + 1H Follow-up in DB-struct (SMEIMKT-8628) + 1H Meeting MM& FLE config bus + 2,5H Meeting Olivier Bounce Lambda : fixed deploy + testing with lambda & Search on Mail-header parsing & DSN libs (SMEIMKT-7931)",,,,,
26/04/25,,,,,,,
27/04/25,,,,,,,
28/04/25,1,"1H Meeting Activity/Contact management status (SMEIMKT-7665) + FE-testing with fixed Lambda + Pwd change shit + IT security assignment + Synchro w/ Mike (back from hollidays) about Bus changes, dyna forms & ICMP 2,5H + 2,25H Meeting pair-prog Mag&MM security for FFS (SMEIMKT-8628)",,,,,
29/04/25,1,"6H deploy release + follow-up (SMEIMKT-8807) + 1H Meeting Mag. Security FFS (SMEIMKT-8628) + 1H SMEIMKT-7665",,,,,
30/04/25,1,"1H Meeting Activity/Contact management status (SMEIMKT-7665) + Administrative (TS) + 1H Call MM debrief & checks on httpGW Release + small fix (SMEIMKT-8325)",,,,,
01/05/25,0,,,16,10768,"13029,28",
02/05/25,0,,,,5384,"2261,28",
03/05/25,,,,,,,
04/05/25,,,,,,,
05/05/25,1,"Activity/Contact management : Debug lambda & ML with Axel, then improve FE (SMEIMKT-7364) + aide SS Bus 0,5H (SMEIMKT-8474)",,,,,
06/05/25,1,"Activity/Contact management : Debug lambda & ML with Axel, then improve FE (SMEIMKT-7364) + aide SS Bus 1H (SMEIMKT-8474) + Meeting Stijn APIDocs 2H (SMEIMKT-8710) + 2H Meeting Mike composants FE (SMEIMKT-8474)",,,,,
07/05/25,1,"Activity/Contact management : Debug lambda & ML with Axel, FE: post-processing of filter analysis (SMEIMKT-7364) ",,,,,
08/05/25,0,,,,,,
09/05/25,0,,,,,,
10/05/25,,,,,,,
11/05/25,,,,,,,
12/05/25,1,"Activity/Contact management (SMEIMKT-7665) Meeting & Filter analysis (4h) + Bus upgrade: Merge packet change into latest httpGW & Verify Midas for actions packets structure. (SMEIMKT-8839)",,,,,
13/05/25,1," 0,5H Meeting Val administrative glitch + 1H Meeting Bus about SMEIMKT-8546, Midas I patch & POD stress testing + 2H prepare & release in DEV MIDAS I patch",,,,,
14/05/25,1,"2H Activity/Contact management (SMEIMKT-7665) Meeting & Filter analysis + 1H Chasing Cognito bug with Stijn + 1H call Mike chart components refresh + 1H Meeting Axel optimization & stability QC",,,,,
15/05/25,1,"1,5H MT-club (New life-update tool from Stijn + some coord & exchanges about Authorizer rewrite) + Bounce Lambda (SMEIMKT-7931) + Meeting bounce lambda & CDK & config Olivier (2H) + Bus packet update in DEV (modify httpGW & Midas & test) + Worked on Bounce Lambda (read S3 OK, parsing OK, DSN lib KO…)",,,,,
16/05/25,1,"(récup 30/05)",,,,,
17/05/25,,,,,,,
18/05/25,,,,,,,
19/05/25,1,"Meeting 1H Activity/Contact management (SMEIMKT-7665) + Verify httpGW & Fix Midas for new ""payload.data"" format + testing + Diagnose & fix POD communication + Meeting 1H Fred & Steph token2 testing & fixing + 1H Meeting Steph on channel & payload patterns & security",,,,,
20/05/25,1,"2H Updated & deployed WSSGateway to allow redpill-mode publish & history + 1H Meeting Mike about implementation is Sparc of assets for datahubs + 5H re-implement DSN parsing (RFC3464) for bounce-lambda (SMEIMKT-7931)",,,,,
21/05/25,1,"Bounce lambda (SMEIMKT-7931) + 1H Meeting RAG + 1H Meeting Activity/Contact management (SMEIMKT-7665) ",,,,,
22/05/25,1,"1.5H Meeting Contact search optimization and business needs (SMEIMKT-7665) + 2H Meeting MalleJack Inventory & TODO list for Authorizer migration + Bounce Lambda (dnsParser) (SMEIMKT-7931)",,,,,
23/05/25,1,"Meeting administrative Francoise + Meeting Mag. upgrade Mailing-service lambda to use new FFS security & npm package + Meeting Siggy about existing bounces + Bounce Lambda (dnsParser) (SMEIMKT-7931)",,,,,
24/05/25,,,,,,,
25/05/25,,,,,,,
26/05/25,1,"Bounce Lambda (SMEIMKT-7931) + 1H Activity/Contact management (SMEIMKT-7665) + 1H Meeting with Marc about QueryCanvas perfs",,,,,
27/05/25,1,"Bounce Lambda (SMEIMKT-7931) + 1H Meeting Budg Middle & FE",,,,,
28/05/25,1,"Bounce Lambda (SMEIMKT-7931) + 1H Activity/Contact management (SMEIMKT-7665) + 0.5H Meeting Stijn Bounce lambda CDK issues + 1H meeting Stijn openAPI documentation new tool + 1H Meeting Val (coord & CNS Prod mask) + Deploy httpGW, wssGW & Midas in TEST (2H) + Meeting Siggy synch secretMGR in prod for CNS",,,,,
29/05/25,0,,,,,,
30/05/25,"0 (à récupérer)","Bounce Lambda (cleanup, test & prepare for DB for deploy) (SMEIMKT-7931) + exchanges with Mikelkel about budget (SMEIMKT-7375) + Check about bus deploiment of 28/05",,,,,
31/05/25,,,,,,,
01/06/25,,,,17,11441,"13843,61",
02/06/25,1," 1H Activity/Contact management (SMEIMKT-7665) + 4H Bounce Lambda (update, redeploy & test massmailing dashboard with bounces) (SMEIMKT-7931) + 2H Docs for Authorizer refactoring (SMEIMKT-8836) + 2H worked on contactMgr Lambda (SMEIMKT-7364)",,,"5720,5","2402,61",
03/06/25,1,"1H Meeting Stijn about lambda-code issue, Bounce pipeline & Authorizer refacto (SMEIMKT-7931) + 1H Meeting Axel about contactMgr (stats on results + misc issues) (SMEIMKT-7364) + 2H contactMgr stats + Exchanges & testing + 2H Docs for Authorizer refactoring (SMEIMKT-8836) + 2H bounce lambda update & deploy (SMEIMKT-7931) ",,,,,
04/06/25,1,"Bounce lambda testing on all old mails, update & deploy to prod + tried reimporting old bounces (SMEIMKT-7931)",,,,,
05/06/25,1,"(SMEIMKT-7364) Merged FE contactMgr branches + started Result analysis+ 1H worked on permissions lambda (SMEIMKT-8836) + 1H meeting Authorizer with MM (SMEIMKT-8836)",,,,,
06/06/25,1,"(SMEIMKT-7364) 6h FE & MT contactMgr Result analysis + pies & MT mods for counters + started QuickProbe + (SMEIMKT-8836) 2H Exchange & refexion with Stijn about dual-session api/myeic process (SMEIMKT-8836)",,,,,
07/06/25,,,,,,,
08/06/25,,,,,,,
09/06/25,0,,,,,,
10/06/25,1," 1H Activity/Contact management (SMEIMKT-7665) + 7H Midas update userRole plugin to kill session & notify FE (SMEIMKT-8256)",,,,,
11/06/25,1,"1,5H Meeting Activity/Contact management prepa meeting BAS (SMEIMKT-8785) + 4H Midas update userRole plugin to kill session & notify FE testing (SMEIMKT-8256) + FFS security review with Mag 2,5H (SMEIMKT-8886)",,,,,
12/06/25,1,"1,5H Meeting BAS (SMEIMKT-8785) + 6,5H Midas update userRole plugin to kill session & notify FE testing (SMEIMKT-8256)",,,,,
13/06/25,0,,,,,,
14/06/25,,,,,,,
15/06/25,,,,,,,
16/06/25,1,"5,5H Midas update userRole plugin to kill session & notify FE final testing + 1H debrief meeting BAS (SMEIMKT-8785) + 1,5H Meeting MM authorizer & next day deployment implications (SMEIMKT-8836)",,,,,
17/06/25,1,"2,5h Deploy latest bus changes (SMEIMKT-8546) + 3H Permissions Lambda bug with MM + 2,5H (SMEIMKT-7364) Adding Search Btn + Sample refresh + probe infos",,,,,
18/06/25,1,"1H Activity/Contact management (SMEIMKT-7665) + 4H (SMEIMKT-7364) Adding Search Btn + Sample refresh + probe infos + 3H prepare meeting Ax v& Marc “Make QueryCanvas production-ready” (SMEIMKT-8897) + meeting + document take-aways (SMEIMKT-8897)",,,,,
19/06/25,1,"2H Fixed mailing broken lambda (after Mag merge) & fix same in PROD + 1H MT-club + 2H Prepare authorizer deployment (sync PROD users + hot-fix Midas userRoles plugin) (SMEIMKT-8836) + 3H (SMEIMKT-7364) Change lambda to cope with arrays in results",,,,,
20/06/25,,,,,,,
21/06/25,,,,,,,
22/06/25,,,,,,,
23/06/25,1,"2H Track & Fixed mailing N/A FE issue & hotfix in PROD + 5H Working in points identified in (SMEIMKT-8897) + 1H Meeting Mike synchro & planning about eventsMgt ",,,,,
24/06/25,1,"1H Activity/Contact management (SMEIMKT-7665) + 7H Working on points identified in (SMEIMKT-8897)",,,,,
25/06/25,1,"2H [SMEIMKT-8315] Analysis: User stories creation for event management MVP + 2H help prepare deploy patched authorizer (SMEIMKT-8836) + 4H Working in points identified in - Better topx counters (SMEIMKT-8897)",,,,,
26/06/25,0,"(Internet Down)",,,,,
27/06/25,1,"2,5H help Jack on end-of-deploy patched authorizer (SMEIMKT-8836), DB cleanup & Kill old-Midas with Siggy + ,5H help Mag & Betty with CNS whitelist + 1H Help MM & Franck with Bypass issue + 4H Working in points identified in - Better topx counters (SMEIMKT-8897)",,,,,
28/06/25,,,,,,,
29/06/25,,,,,,,
30/06/25,1,"1H Activity/Contact management (SMEIMKT-7665) + Working in points identified in (SMEIMKT-8897)",,,,,
01/07/25,1,"2H prepa meeting testing + meeting (SMEIMKT-8909) + 1H Meeting Axel make QC prodready + Working in points identified in (SMEIMKT-8897) changed to vertical tabs",,12,8076,"9771,96",
02/07/25,1,"Removed Samples + improved probe with explorer SMEIMKT-8897",,,4038,"1695,96",
03/07/25,1,"1H Activity/Contact management (SMEIMKT-7665) + improved probe with explorer SMEIMKT-8897 + Meeting Mag activity Mgr (FE &MT creation events) (SMEIMKT-8960)",,,,,
04/07/25,0,,,,,,
05/07/25,0,,,,,,
06/07/25,0,,,,,,
07/07/25,0,,,,,,
08/07/25,0,,,,,,
09/07/25,0,,,,,,
10/07/25,0,,,,,,
11/07/25,0,,,,,,
12/07/25,0,,,,,,
13/07/25,0,,,,,,
14/07/25,0,,,,,,
15/07/25,0,,,,,,
16/07/25,1,"1H Activity/Contact management meeting (SMEIMKT-7665) + Working in points identified + Meeting Axel API changes after 1st round of optimizations + More changes TODO: DATE field as criteria, as Graph + Arrays in probe browser + changed Roles=> dynamic (SMEIMKT-8897)",,,,,
17/07/25,1,"3H Meeting Stijn about Authenticator & MT libs & Backstage & K8S (SMEIMKT-8836) + ContactMgr changes : DATE field as criteria (SMEIMKT-8897) + 3H Meeting Activity Management follow-up (SMEIMKT-8960) + 2H DATE field as criteria, as Graph OK (SMEIMKT-8897)",,,,,
18/07/25,0,,,,,,
19/07/25,,,,,,,
20/07/25,,,,,,,
21/07/25,0,,,,,,"EU-Survey: SMEIMKT-8093"
22/07/25,1,"4H Restart & State inventory / damage report on Sparcomatic (SMEIMKT-8909) + 4H Adapt QC Lambda to latest ORM version,& modifs + finish latests points & fixes in ContactMgr (Probe bug, fileds names in Probe-browser, Arrays) (SMEIMKT-8897)",,,,,
23/07/25,1,"1,25H Activity/Contact management meeting (SMEIMKT-7665) + 6,75H Merge latest develop changes back to contactMgr branch, compare & test + Start to fetch from QC for real into Mailer + refacto massmailing lambda to latest MT standards (SMEIMKT-8897)",,,,,
24/07/25,1,"4H Debug Refacto massmailing lambda (SMEIMKT-9022) + import QC in massmailing + 1,5H Meeting Data import to ML for the event and beyond (SMEIMKT-8671) + In contactMgr: put dynamic roles (SMEIMKT-8897)",,,,,
25/07/25,0,,,,,,
26/07/25,,,,,,,
27/07/25,,,,,,,
28/07/25,1,"1H Activity/Contact management meeting (SMEIMKT-7665) + 2H Fixed filter counters bug in Mass-mailing (SMEIMKT-9009) + 1H Meeting about TESTING with Betty & Mikelkel (SMEIMKT-8909) + 1H Packing VM to give to B.(SMEIMKT-8909) + Investigate existing MT model & DB structure to make new imports (SMEIMKT-9023) ",,,,,
29/07/25,1,"2H sync meeting Mike FE archi. & components (SMEIMKT-8532) + 1H Onboarding Betty: giving access to sparcomatic VM + 4H ContactMgr make Fetch work with imports (MT + FE) (SMEIMKT-9023) + 1H Fix bug SMEIMKT-9015",,,,,
30/07/25,1,"1H Activity/Contact management meeting (SMEIMKT-7665) + 1,5H Investigate Ticket SMEIMKT-9016 +,5H Meeting Romain doc Mailing + 5H ContactMgr make Fetch work with imports (fix hints, add name of query, fix UI, started query refresh & edit (SMEIMKT-9023) ",,,,,
31/07/25,1,"8H ContactMgr make Fetch work with imports: Edit + Refresh query (SMEIMKT-9023) ",,,,,
01/08/25,0,,,16,10768,"13029,28",
02/08/25,,,,,5384,"2261,28",
03/08/25,,,,,,,
04/08/25,1,"0,5H SMEIMKT-7986 + 1H Activity/Contact management meeting (SMEIMKT-8513) + 2,5H Help Siggy setup sparcomatic FE (SMEIMKT-9020) + 4H Refresh query : Change according to meeting Warning with diffs before (SMEIMKT-9023) ",,,,,
05/08/25,1,"1H Help Siggy setup sparcomatic FE (SMEIMKT-9020) + 7H Exclusion lists (SMEIMKT-9028)",,,,,
06/08/25,1," 1H Activity/Contact management meeting (SMEIMKT-8513) + 0,5H Help Siggy setup sparcomatic FE + 0,5H Meeting QA Status to show ongoing changes (SMEIMKT-8939)+ 6H Exclusion lists (SMEIMKT-9028)",,,,,
07/08/25,1," Debug Dialogs issue (4h) +1H Exclusion lists (SMEIMKT-9028) + 3H Help Betty understand SPARC login & debug + setting up Github & sync code (SMEIMKT-8909)",,,,,
08/08/25,0,,,,,,
09/08/25,,,,,,,
10/08/25,,,,,,,
11/08/25,1,"1H Activity/Contact management meeting (SMEIMKT-8513) + 6H Exclusion lists (SMEIMKT-9028) + 1H Call Siggy to get proper access to Backstage (lib creation) EICINFRA-1405 ",,,,,
12/08/25,1,"2H Bounce emails PROD testing + import (smeimkt-8656) + 6H Transform mailingsModel in a lib, update mailing service & scheduled lambda + change for exclusions (SMEIMKT-9028) ",,,,,
13/08/25,1,"1H review & update mail sending chain for change for exclusions (SMEIMKT-9028) + 2,5H Activity/Contact management meeting + analysis of bounce behaviors (SMEIMKT-8513) + 4,5H Group work on old bounce issues + Update old bounce lambda",,,,,
14/08/25,1,"4H Verify bounce end-to-end in PROD + 2H meeting & coding with Siggy to cleanup & recuperate old bounces + 2H Document bounces discussion",,,,,
15/08/25,0,,,,,,
16/08/25,,,,,,,
17/08/25,,,,,,,
18/08/25,1,"1H Activity/Contact management meeting (SMEIMKT-8513) + 1,5H Meeting about documentation + 1H Trials with Siggy about Swagger generation + 0,5H Fix Sparcomatic install + 2H Check & fix Sparcomatic deploy + 2H Bounce archi documentation & DB structure",,,,,
19/08/25,1,"2H Fix Sparcomatic install + 1h Meeting Betty + 1h Meeting Siggy about old bounces + Implementing & deploying blacklist into bounce lambda + MT rights in PROD cleanup with Mag + 1h fix schedule bug (SMEIMKT-8915)",,,,,
20/08/25,"0,5","1H Activity/Contact management meeting (SMEIMKT-8513) + 2H Meeting Stijn about backstage & Sparcomatic domain CORS issue + 0,5 Testing Sparcomatic (SMEIMKT-9030) + 0,5H Meeting Siggy old bounces ",,,,,
21/08/25,1,"1H Meeting Siggy old bounces + 1H Meeting Betty Sparcomatic + 2H Fixing Sparcomatic for ECAS (SMEIMKT-9030) + 2H Meeting Backstage (SMEIMKT-8244) + 2H Debug with Steph dissemination + bus in TEST",,,,,
22/08/25,0,,,,,,
23/08/25,,,,,,,
24/08/25,,,,,,,
25/08/25,1,"1H Activity/Contact management meeting (SMEIMKT-8513) + Blacklist in CNS Notifier + testing (5H) + Finish refacto of mailing-service",,,,,
26/08/25,1,"0,5H Help Betty with ECAS issue + 2H Cleaning up deployments and merges for bounce-service, ope-cns-notifier and mailing-service + 1,5H meeting Siggy about Gitlab and Backstage access issues + 4H Create structure (DB & pieces of code) for activlinks (hops) (smeimkt-9078)",,,,,
27/08/25,1,"1H Activity/Contact management meeting (SMEIMKT-8513) + 0,5H meeting Val about compliance + 1H structure change in templates & access FFS with Mikelkel + 0,5H tests for Stijn about GL & BS issues + 1H DB structure for hops + 4H Meeting about GL workflows with Siggy + ",,,,,
28/08/25,1,"1H Meeting Stijn Authorizer (EICINFRA-1408) + 1H Meeting Romain& Mikelkel about template paths & variables + 2H modify SPARC Core for better external redirections. (EICINFRA-1408) + 1H FFS users in ACCP SMEIMKT-9062 +3H token research & coding for hops",,,,,
29/08/25,"0,5","4h Lambda cookie cutter rework => multi templates + ML lambda new style (SMEIMKT-9077)",,,,,
30/08/25,,,,,,,
31/08/25,,,,,,,
01/09/25,0,,,16,10768,"13029,28","SMEIMKT-7375 coord"
02/09/25,0,,,,5384,"2261,28","SMEIMKT-5295 Team Calls"
03/09/25,0,,,,,,"SMEIMKT-502 user guides, onboarding"
04/09/25,0,,,,,,"SMEIMKT4956 Recurrent operational tasks"
05/09/25,0,,,,,,"Onboarding: EPIC SMEIMKT-502"
06/09/25,,,,,,,"Training Magali: SMEIMKT-7742 (FE) + SMEIMKT-7874 (MT)"
07/09/25,,,,,,,
08/09/25,1,"1H Activity/Contact management meeting (SMEIMKT-8513) + 5h Chasing mailing crash in TEST (SMEIMKT-9085) + 2H New Middletier template for cookie cutter(SMEIMKT-9077)",,,,,
09/09/25,1,"1h meeting Stijn about mt-sessionRead, & SSM params (SMEIMKT-9085) + 1h meetings Romain & Mikelkel about ACCP MFA ( SMEIMKT-9138) + 1H meeting MM about mt issues (SMEIMKT-8244) + 2H improved mutils with logAllRejections (SMEIMKT-8244) + 3H Start writing lambda for activlinks (hops) (SMEIMKT-9078) ",,,,,
10/09/25,1,"1H Activity/Contact management meeting (SMEIMKT-8513) + 7H writing lambda for activlinks (hops) (SMEIMKT-9078) ",,,,,
11/09/25,1,"1H mailing & tpl access to users in prod (SMEIMKT-9091) + 2H published mtUtils 1.7.2 with logAllRejections after long fight with bundler (SMEIMKT-8244) + 1H Meeting Mikelkel about mailing watermarking recipient tracing (SMEIMKT-9078) + 4H hopper lambda & modif cns-notifier & scheduler for recipient ID tracing (SMEIMKT-9078)",,,,,
12/09/25,0,,,,,,
13/09/25,,,,,,,
14/09/25,,,,,,,
15/09/25,1,"1H Activity/Contact management meeting (SMEIMKT-8513) + 0.5H put a robots.txt in each environment (SMEIMKT-6434) + 6.5H Testing hopper stats & adapting token (Meeting W/ Axel & Franck about ML recipient IDs) (SMEIMKT-9078)",,,,,
16/09/25,1,"0.5H Meeting Mike Synchro (SMEIMKT-5295) + 4h mtMailUrlToken lib (SMEIMKT-9078) + 3.5H Finished testing WM workflow started documenting Watermarking process (SMEIMKT-9078) ",,,,,
17/09/25,1,"8h Hopper & CNS-lambda (for hopper & Meeting Mag about templitor tags & behavior for watermarking & Document & redesign Watermarking process (SMEIMKT-9078)",,,,,
18/09/25,1,"3h Document & redesign Watermarking process (SMEIMKT-9078) + 2h Meeting Betty (& Stijn) to fix status issue in mailings (SMEIMKT-9118) + 3h Pair prog with Mag to set templitor lambda to latest standards (SMEIMKT-8161)",,,,,
19/09/25,1,"8H Finished documenting after refactory, Recoded CNS-notifier with html parsing (first jsDOM, then converted to linkedom), added anonymous RCP, improved token lib, adapted hopper lambda, tested not OK (SMEIMKT-9078)",,,,,
20/09/25,,,,,,,
21/09/25,,,,,,,
22/09/25,1,"6H Debugged CNS-notifier & hopper, cleanup & deploy (SMEIMKT-9078) + 2H CSV crashes SMEIMKT-9124",,,,,
23/09/25,1,"2H Proper release of CNS & Hopper & mtMailingModel with all latest changes & bugfixes in DEV (SMEIMKT-9124) + 2H Meeting Mike about bug in core SMEIMKT-8915 + 3H started Mailing statistics FE side (SMEIMKT-9124) + 1H Meeting Mag. about Activity/Contact management Babystep (SMEIMKT-9137)",,,,,
24/09/25,1,"3H Mailing statistics FE side (SMEIMKT-9124) + 1H Activity/Contact management meeting (SMEIMKT-8513) + 2.5H Meeting Steph, Manu, Mag about API for baby-step (SMEIMKT-9137) + 1H Bug mailing Request review button doesn't hide (SMEIMKT-9125) + 0.5H Mailing statistics FE side (SMEIMKT-9124) ",,,,,
25/09/25,1,"4H Schedule bug (SMEIMKT-8915) + 1H Meeting Betty (Sparcomatic & release process) (SMEIMKT-9138) + 1H Meeting Structure templitor & release process (SMEIMKT-9138) + 1H Meeting Baby-step Activities API (SMEIMKT-9137) + 1H Mailing statistics FE side (single route for different views:1st unclean solution) (SMEIMKT-9124) ",,,,,
26/09/25,1,"2.5H Meeting about release process (SMEIMKT-9138) +1.5H Mailing statistics FE side (SMEIMKT-9124) + 0.5H analyse Mikelkells release workflow (SMEIMKT-9138) + 0.5H Call Mag. about watermarking (SMEIMKT-9078) + 3H Mailing statistics FE side (single route for different views: solved) (SMEIMKT-9124)",,,,,
27/09/25,,,,,,,
28/09/25,,,,,,,
29/09/25,1," 1H Activity/Contact management meeting (SMEIMKT-8513) + 0.5H Meeting Siggy about stabilization Release (SMEIMKT-9138) + 2.5H Timezone display SMEIMKT-9136 + 1H Meeting release process (SMEIMKT-9138) + 1.75H Meeting Releases (SMEIMKT-9138) + 1.25H Mailing statistics FE side (SMEIMKT-9124)",,,,,
30/09/25,1,,,,,,
01/10/25,,,,,,,
02/10/25,,,,,,,
03/10/25,,,,,,,
04/10/25,,,,,,,
05/10/25,,,,,,,
06/10/25,,,,,,,
07/10/25,,,,,,,
08/10/25,,,,,,,
09/10/25,,,,,,,
10/10/25,,,,,,,
11/10/25,,,,,,,
12/10/25,,,,,,,
13/10/25,,,,,,,
14/10/25,,,,,,,
15/10/25,,,,,,,
16/10/25,,,,,,,
17/10/25,,,,,,,
18/10/25,,,,,,,
19/10/25,,,,,,,
20/10/25,,,,,,,
21/10/25,,,,,,,
22/10/25,,,,,,,
23/10/25,,,,,,,
24/10/25,,,,,,,
25/10/25,,,,,,,
26/10/25,,,,,,,
27/10/25,,,,,,,
28/10/25,,,,,,,
29/10/25,,,,,,,
30/10/25,,,,,,,
31/10/25,,,,,,,
01/11/25,,,,,,,
02/11/25,,,,,,,
03/11/25,,,,,,,
04/11/25,,,,,,,
05/11/25,,,,,,,
06/11/25,,,,,,,
07/11/25,,,,,,,
08/11/25,,,,,,,
09/11/25,,,,,,,
10/11/25,,,,,,,
11/11/25,,,,,,,
12/11/25,,,,,,,
13/11/25,,,,,,,
14/11/25,,,,,,,
15/11/25,,,,,,,
16/11/25,,,,,,,
17/11/25,,,,,,,
18/11/25,,,,,,,
19/11/25,,,,,,,
20/11/25,,,,,,,
21/11/25,,,,,,,
22/11/25,,,,,,,
23/11/25,,,,,,,
24/11/25,,,,,,,
25/11/25,,,,,,,
26/11/25,,,,,,,
27/11/25,,,,,,,
28/11/25,,,,,,,
29/11/25,,,,,,,
30/11/25,,,,,,,
01/12/25,,,,,,,
02/12/25,,,,,,,
03/12/25,,,,,,,
04/12/25,,,,,,,
05/12/25,,,,,,,
06/12/25,,,,,,,
07/12/25,,,,,,,
08/12/25,,,,,,,
09/12/25,,,,,,,
1 Date Qty Actions Remarks
2 01/12/24 '(0.5) POC of writing file from MIDAS container to my S3 frontend + first code structure of PDF Maker 15 9945 12033,45
3 02/12/24 1 .5H Sprint follow-up (SMEIMKT-8352) + 7.5H SMEIMKT-8268 (mailing sheet page - latest activity, dups, import & delete import) 4972,5 2088,45
4 03/12/24 1 templitor Lambda MySQL refactoring (smeimkt-8112)
5 04/12/24 1 SMEIMKT-8268 (mailing sheet page - Dups viewer + mappings refactor) + mappings refactor in lambda (SMEIMKT-8267)
6 05/12/24 1 SMEIMKT-8268 (mailing sheet page - finish mappings refactor + debug sources) + 1.5H Meeting Event brainstorming (SMEIMKT-8168)
7 06/12/24 1 SMEIMKT-8268 (mailing sheet page - review) + 1.5H Meeting Magali Onboard & pair-debugging
8 07/12/24 (0,5) SMEIMKT-8268 (mailing sheet page – improve async-loader + debugging Review: Rejection reason)
9 08/12/24
10 09/12/24 1 2H Meeting events (SMEIMKT-8168) + 2H pair-prog Mag. Templitor service + mailing sheet page – improve async-loader + debugging Review: Rejection reason (SMEIMKT-8268)
11 10/12/24 1 0,5H Help Mag. + 0,5H Meeting Val followup + Finished Review: comments + better rights on WF (SMEIMKT-8268)
12 11/12/24 1 PDF Maker + CNS lambda optimization & switch to MySQL
13 12/12/24 1 CNS (switch to MySQL + Store CNS_ID) + 0.25 Meeting Val (planning Mailing) + 1H meeting Mag & Axel: PDFMaker & templates + Added B64 tokens in PDFMaker & into CNS Lambda
14 13/12/24 1 CNS (switch to MySQL + Store CNS_ID + decrypt CNS-ID from email) + 5 2xMeeting Val (planning Mailing) + 0.5H meeting Mag, Mikekel & Andreas PDFMaker planning
15 14/12/24 (0,5) Mailing: Corrected some workflow issues, added the “cancel mailing”, improved async-loader(now re-entrant) + Last-minute R&D re-archi scheduler & injection with large recipients count
16 15/12/24 (0,5) Last-minute R&D re-archi scheduler & injection with large recipients count + debug & improve Schedule + scheduled mailing (FE & MT)
17 16/12/24 1 Mailing deployment with Olivier + fixed PDF-Maker UTF8 issue
18 17/12/24 1 Mailing deployment with Olivier + Chasing MIDAS II IPV6 issue with Stijn
19 18/12/24 1 Prepared MySQL to SQS lambda+ Meeting synchro with Mike (SMEIMKT-8421)
20 19/12/24 1 Recup 01/12 & 07/12
21 20/12/24 1 Recup 14/12 & 15/12
22 21/12/24
23 22/12/24
24 23/12/24 0
25 24/12/24 0
26 25/12/24 0
27 26/12/24 0
28 27/12/24 0
29 28/12/24
30 29/12/24
31 30/12/24 0
32 31/12/24 0
33 01/01/25 0 20 13260 16044,6
34 02/01/25 0 6630 2784,6
35 03/01/25 0
36 04/01/25
37 05/01/25
38 06/01/25 1 Deploy & debug Mailings in test(SMEIMKT8231) + debug PDF Maker SMEIMKT-7979
39 07/01/25 1 Deploy & debug Mailings in test, acc & prod + Demo (SMEIMKT8231)
40 08/01/25 1 Debug Mailings (mapping & non-mail templates) + Pairprog Mag on templitor (send test mail + new helper + talk about security groups on folders) SMEIMKT-7874
41 09/01/25 1 1,5H Meeting Integration synchrone POD (SMEIMKT-8448) + Develop Scheduling lambda (SMEIMKT8231)
42 10/01/25 1 Deploy scheduling lambda + Debug & improve pdfMaker (Special characters, B64 fields) SMEIMKT-7979
43 11/01/25
44 12/01/25
45 13/01/25 1 Document Mailing + 3h Meeting synchro Mike (Debug baseroutes & services + activeAttributes + Versioning EIC-Components)
46 14/01/25 1 Meeting prepa BXL-get-together (1.25H - SMEIMKT-8462) + Document Mailing + 3h Onboarding Steph about the BUS (gateways functionning, code, and the Request-reply via Promise pattern). SMEIMKT-8222
47 15/01/25 1 Improve Midas Notifications (requestType + requestUUID - check the whole chain until CNS Lambda) SMEIMKT-8081
48 16/01/25 1 BusConsole SMEIMKT-8510
49 17/01/25 1 BusConsole SMEIMKT-8510
50 18/01/25 1 Day (offered) AI POC : Moderation & Coverage via Midas
51 19/01/25 1 Day AI POC : Moderation & Coverage via Midas
52 20/01/25 1 BusConsole SMEIMKT-8510
53 21/01/25 1 BusConsole SMEIMKT-8510 + 2.5H pair-debug with Mag. SMEIMKT-7874
54 22/01/25 1 BXL get-together
55 23/01/25 1 BXL get-together
56 24/01/25 1 BXL get-together
57 25/01/25
58 26/01/25
59 27/01/25 1 Récup 19/01
60 28/01/25 1 Adding Queue to Midas + Research about asyncAPI & exchanges with Stijn about it SMEIMKT-8390
61 29/01/25 1 Meeting Mikelkel (deployment PDF Maker + business & AI strategy) + Meeting contact Mgt status (SMEIMKT-7665) + debugging Midas queue SMEIMKT-7979 + debugging Midas notifs SMEIMKT-8081 + 1.5H pair-debug with Mag. SMEIMKT-7874
62 30/01/25 1 Debugging Midas notifs SMEIMKT-8081 + Full stack Release + 0.5H pair-debug with Mag.
63 31/01/25 1 Started UserRoles plugin SMEIMKT-8256 + Meeting Axel about UserRoles contract & roles metas & Responsability API + 0.5H Pair debug with Mag.SMEIMKT-7874 + Meeting Jack & Steph about POD connection
64 01/02/25 0.5 (offered) (WE+most evenings) Bus console 17 11271 13637,91
65 02/02/25 0.5 (offered) (WE+most evenings) Bus console 5635,5 2366,91
66 03/02/25 1 Midas UserRoles mgt (user-roles + parallelizing queue processing) SMEIMKT-8256 + 1h Meeting Activity/Contact management status (SMEIMKT-7665) 3 2019 2442,99
67 04/02/25 1 Midas UserRoles mgt (backend-roles + meta + upgrade local MariaDB to 11.5) SMEIMKT-8256 + Meeting Axel for contract on backend-roles) + MeetingMike synchro & UserMgt (Forcing session invalidation or not) 1009,5 423,99
68 05/02/25 1 1h Meeting Activity/Contact management status (SMEIMKT-7665) + Midas UserRoles mgt (backend-roles + meta + Replaced MariaDB By MySQL8 to be 100% AWS compatible) SMEIMKT-8256 13290 16080,9
69 06/02/25 1 Midas deploy & debug of refactored version with userRoles, Queuing + debug problem with got-cloning plugins SMEIMKT-8256 6645 2790,9
70 07/02/25 1 Midas deploy & debug with Stijn (fixed the PDF error) SMEIMKT-8256
71 08/02/25 0.5 (offered) (WE+most evenings) Bus console
72 09/02/25 0.5 (offered) (WE+most evenings) Bus console
73 10/02/25 1 0.5 Meeting Activity/Contact management status (SMEIMKT-7665) + Midas (adapted packets to be BW-compatible & redeployed to DEV) SMEIMKT-8256 + 1h Document apiResponse in MT-club minutes + 2h fix SMEIMKT-8555 & explain to Mag. the issue
74 11/02/25 1 Fixed CreateUserRole packet format after testing in dev, then Released & tested OK Midas UserUpdate from TEST to PROD. (SMEIMKT-8256) + Helped Val testing (Roles) + Fixed small FE issue in mailing (long template names) SMEIMKT-8555 + Added Unschedule to FE (SMEIMKT-8302)
75 12/02/25 1 Worked on Delete Mailing (SMEIMKT-8302) + 1h Meeting Activity/Contact management status (SMEIMKT-7665) + 3h Meeting Mike Black-op User Mgt
76 13/02/25 1 2h Document mailing-service in Swagger (SMEIMKT-8389) + 1h Synchro with Mike about Contact-Mger (SMEIMKT-7665) + Meeting Marc & Mag. about activityMgr, Mailing & contactMgr (2h - SMEIMKT-7665) + 1.5H Meeting Val. stratégie & planning Events (SMEIMKT-7665) + 0.5H Fixed issue seen during Demo to Marc (SMEIMKT-8302)
77 14/02/25 1 0.25h Meeting MalleJack (lessons learned on MT-issue for ICMP) + .5h help Mag. with env variables in MT (SMEIMKT-7874) + 5h Document mailing-service in Swagger (SMEIMKT-8389) + 2.25 Fixed template browser Path bug + Finished delete-mailing (SMEIMKT-8302)
78 15/02/25 0.5 (offered) (WE+most evenings) Bus console
79 16/02/25 0.5 (offered) (WE+most evenings) Bus console + Debug Midas dbl-subscribe after config reload (SMEIMKT-8256)
80 17/02/25 1 Updated Cookie-cutter, recent libs, latest MT-patterns + started with contactMgr lambda + rebuilt mtBus package, to get lambda to communicate with httpGateway + Started kicking out commonlib (OPTIONS handling method) + 1H Meeting Activity/Contact management status (SMEIMKT-7665) + .5h Validated Midas roles services in Prod (SMEIMKT-8256)
81 18/02/25 1 Rebuilding a session module for ContactMgr lambda (SMEIMKT-7727)
82 19/02/25 1 0.5H Meeting Activity/Contact management status (SMEIMKT-7665) + .5h Meeting Val. coord activity/contactMgt + Rebuilding a session module for ContactMgr lambda (SMEIMKT-7727)
83 20/02/25 1 Released & distributed BusConsole 1.0.2 + Removed Bus in platform-crtl from FE + 2h Meeting Stijn about origins, Backstage & busconsole + .5h Meeting Olivier about CORS origins + Improving session module (allowedOrigins) for ContactMgr lambda ([SMEIMKT-7986)
84 21/02/25 1 Rewrite Marklogic ORM package in ContactMgr lambda ([SMEIMKT-7986) +0,5 meeting MM about ML-ORM
85 22/02/25
86 23/02/25
87 24/02/25 1 Rewrite Marklogic ORM package in ContactMgr lambda ([SMEIMKT-7986) + 1,5H Meeting Contact management status (SMEIMKT-7665) + Meeting MM, 1H meeting Axel about ML-ORM
88 25/02/25 1 Debug & test Marklogic ORM package in ContactMgr lambda (SMEIMKT-7986) + 1h meeting Marc about request-API
89 26/02/25 1 1,5H Meeting Contact management status + 0,5H Meeting Marc contract MT-BE + Connect & test MT to new request orm (SMEIMKT-7665) + 1H Optimize & debug templitor lambda with Maf. (SMEIMKT-7874)
90 27/02/25 1 1h Help Mag. With 405 Error from APIGW SMEIMKT-7874 + 3.5H Fix Bus error in prod (httpGW big packets) & fight with broken pipes without Stijn or Olivier (F***) + fight with 7777 because Microsoft account broken + 3.5h n ContactMgr lambda (SMEIMKT-7986)
91 28/02/25 1 2h Help Mag. With 405 Error from APIGW SMEIMKT-7874 + 1.5H Meeting Mikelkel coord (planning & tasks for Mag.) 4.5h n ContactMgr lambda & fix apiResponse on OPTION issue (SMEIMKT-7986)
92 01/03/25 0.5 (recup) FE fetch component + improve MT-side (SMEIMKT-7986) 16 10768 13029,28
93 02/03/25 0.5 (recup) FE fetch component + improve MT-side (SMEIMKT-7986) 5384 2261,28
94 03/03/25 1 FE fetch component + improve MT-side (SMEIMKT-7986) + 1,5H Status Meeting Contact management status (SMEIMKT-7665)
95 04/03/25 1 FE fetch component + improve MT-side (SMEIMKT-7986) + 2,5H Demo & Meeting Contact management status (SMEIMKT-7665)
96 05/03/25 1 [3h Invoicing & Helpdesk about Welcome migration] FE fetch component + migrate & debug MT-side to cope with all improvements made by Marc on ML-side (SMEIMKT-7986) + 2h Meeting Mike about Front-End weird bug about “null in components”
97 06/03/25 1 2,5H meeting Mag + Meeting Mag & Siggy about archi & planning to connect Drupal webforms to ML via Bus + 2h with Mike: Front-End weird bug about “null in components” finally Infra bug : Stijn Cognito hack + 3.5h improve FE for mark changes (SMEIMKT-7986)
98 07/03/25 1 4h improve FE for mark changes (SMEIMKT-7986) + 4H Bus console (redpill)
99 08/03/25
100 09/03/25
101 10/03/25 OFF
102 11/03/25 OFF
103 12/03/25 OFF
104 13/03/25 OFF
105 14/03/25 OFF
106 15/03/25
107 16/03/25
108 17/03/25 1 Recup 01.03 & 02/03
109 18/03/25 1 4H Bus console (redpill) + admin (helpdesk & re-invoice) + Read&comment Incident report - SMED-2020/11/18
110 19/03/25 1 6H Debug 401 not triggering re-login (SMEIMKT-8658) + 2H Bus console (redpill) + Meeting Yassen & Fabien about NET1 lost access + 2H Meeting Olivier about Lambda for bounce mails
111 20/03/25 1 1.5H Meeting MM middle-tier improvement (code review & session debug) + .5H Meeting Val coordination + 1H Siggy DrupalGateway + 3H Bus console (redpill OK) + 2H Debug the Session issue with Postman with MM
112 21/03/25 1 3H Code review & debug all new-style Lambda libs (Debugged the Session issue with Postman) + 1H Install & explain Bus console to MM + 4H BUS-POD connection pair-prog with MM (debugged the SecretMgr cnx, from the container)
113 22/03/25
114 23/03/25
115 24/03/25 1 1H Meeting Activity/Contact management status (SMEIMKT-7665) + 0.5H Meeting Steph & Val: Tokens APIGW Europa pour bus-POD + 0.5H Meeting Steph install Busconsole + 1H fix secretManager issue on httpGatewy with Olivier +
116 25/03/25 1 1H Meeting PDF issue (SMEIMKT-8680) + 1H Meeting Mike (resynchro UI develop branch status) + 2H httpGateway deployment issue in dev + 2H review & amend Stephane Doc on Bus channels conventions + 2H adding REDIS INFO to WSS Gateway
117 26/03/25 1 1.5H Meeting Activity/Contact management status (SMEIMKT-7665) +1H Meeting about webforms + 5.5h Pietreechart r&d
118 27/03/25 1 7h pietreechart for criteria eval (SMEIMKT-7665) + 1H Meeting Siggy about Zammad upgrade issues
119 28/03/25 1 8h pietreechart for criteria eval + reworking FE & Lambda to latest changes (SMEIMKT-7665)
120 29/03/25
121 30/03/25
122 31/03/25 1 1H Meeting Activity/Contact management status + 1H Meeting Marc debugging MT-ORM crashes after changes (SMEIMKT-7665) + 1H Meeting Olivier Bounce mechanism (SMEIMKT-7931) + 2H Meeting Stijn New documentation system for APIs + testing by redeploying mailing service (SMEIMKT-8710) + Debug with Marc & Axel QueryCanvas issues
123 01/04/25 1 2.5H Meeting Bus channels (SMEIMKT-7196) + 2H Meeting Bus Drupal (SMEIMKT-7196) + 1H POC double token for POD (SMEIMKT-8325) + 2.5H debug Queries (SMEIMKT-7665) 15 10095 12214,95
124 02/04/25 1 1H Meeting Activity/Contact management status 4.5H Meeting Mark & debug Queries (SMEIMKT-7665) + 1H Pair-prog Jack Bus-POD (SMEIMKT-8325) + 1.5H Meeting FFS-securité (SMEIMKT-8734) 5047,5 2119,95
125 03/04/25 1 0.5H Meeting Magali FFS security + 0.5H discuss security Marc (SMEIMKT-8734) + 6.5H Building FE filterspie (legend) + cleanup Insomnium requests to give to Marc, and debug some + started build filters-analyse into Lambda (SMEIMKT-7665) + 1H Trying & modding Stijn Python script for openAPI into Backstage (SMEIMKT-8710)
126 04/04/25 1 1.5H discuss security Magali (SMEIMKT-8628) + 1H DB & document review (SMEIMKT-8734) + 1.5H Chatxchages Steph + HttpGateway code review + 0.5H call Steph Bus-POD (SMEIMKT-7196) + 4H Code review New documentation system for APIs (SMEIMKT-8710)
127 05/04/25
128 06/04/25
129 07/04/25 1 1H Meeting Activity/Contact management status (SMEIMKT-7665) +1H Meeting Marc Dynamic Forms & ContactMGR count & debug + 2H Merge & review HTTPGW code (SMEIMKT-8325)
130 08/04/25 1 7,5H HTTPGW POD (Pair prog with MM + debug & add features + token event & requests (SMEIMKT-8325) + 0,5H Meeting Marc AccessRights to QueryCanvas
131 09/04/25 1 1H Meeting Activity/Contact management status (SMEIMKT-7665) + 0,5H Meeting Siggy about Zammad bug (EICINFRA-1272) + Pair-prog with MM on HttpGateway POD + alone (SMEIMKT-8325) + 0,5H meeting Marc about latest features in contactMGR (SMEIMKT-7665)
132 10/04/25 1 HTTPGW POD (Pair prog with MM 4H + alone : debug) (SMEIMKT-8325) + 1H meeting archi w/ Val.
133 11/04/25 1 Adapt contactMgr lambda to new version of ORM (SMEIMKT-7665) + (Administrative emergency: Progress report for end-of-last-contract) + Activity Mgt user group Meeting
134 12/04/25
135 13/04/25
136 14/04/25 0
137 15/04/25 0
138 16/04/25 0
139 17/04/25 0
140 18/04/25 0
141 19/04/25
142 20/04/25
143 21/04/25 0
144 22/04/25 0
145 23/04/25 1 1H Meeting Activity/Contact management status (SMEIMKT-7665) + 3H testing & crashing Bounce Lambda + 1,5H Debugging Marklogic ContactMgr with Marc + 0,5H Meeting Bus (MM, FLE, AB, SS) + 2H admin-shit
146 24/04/25 1 5H Pair-prog with MM on httpGateway : deployed final token version in DEV & tested OK (SMEIMKT-8325) + 3H Help Magali refactor her security Lambda in new lambda-style (SMEIMKT-8628)
147 25/04/25 1 2H Checked httpGW with MM + 1,5H Meeting Mag FFS-security (review DB struct & archi) + 1H Follow-up in DB-struct (SMEIMKT-8628) + 1H Meeting MM& FLE config bus + 2,5H Meeting Olivier Bounce Lambda : fixed deploy + testing with lambda & Search on Mail-header parsing & DSN libs (SMEIMKT-7931)
148 26/04/25
149 27/04/25
150 28/04/25 1 1H Meeting Activity/Contact management status (SMEIMKT-7665) + FE-testing with fixed Lambda + Pwd change shit + IT security assignment + Synchro w/ Mike (back from holidays) about Bus changes, dyna forms & ICMP 2,5H + 2,25H Meeting pair-prog Mag&MM security for FFS (SMEIMKT-8628)
151 29/04/25 1 6H deploy release + follow-up (SMEIMKT-8807) + 1H Meeting Mag. Security FFS (SMEIMKT-8628) + 1H SMEIMKT-7665
152 30/04/25 1 1H Meeting Activity/Contact management status (SMEIMKT-7665) + Administrative (TS) + 1H Call MM debrief & checks on httpGW Release + small fix (SMEIMKT-8325)
153 01/05/25 0 16 10768 13029,28
154 02/05/25 0 5384 2261,28
155 03/05/25
156 04/05/25
157 05/05/25 1 Activity/Contact management : Debug lambda & ML with Axel, then improve FE (SMEIMKT-7364) + aide SS Bus 0,5H (SMEIMKT-8474)
158 06/05/25 1 Activity/Contact management : Debug lambda & ML with Axel, then improve FE (SMEIMKT-7364) + aide SS Bus 1H (SMEIMKT-8474) + Meeting Stijn APIDocs 2H (SMEIMKT-8710) + 2H Meeting Mike composants FE (SMEIMKT-8474)
159 07/05/25 1 Activity/Contact management : Debug lambda & ML with Axel, FE: post-processing of filter analysis (SMEIMKT-7364)
160 08/05/25 0
161 09/05/25 0
162 10/05/25
163 11/05/25
164 12/05/25 1 Activity/Contact management (SMEIMKT-7665) Meeting & Filter analysis (4h) + Bus upgrade: Merge packet change into latest httpGW & Verify Midas for actions packets structure. (SMEIMKT-8839)
165 13/05/25 1 0,5H Meeting Val administrative glitch + 1H Meeting Bus about SMEIMKT-8546, Midas I patch & POD stress testing + 2H prepare & release in DEV MIDAS I patch
166 14/05/25 1 2H Activity/Contact management (SMEIMKT-7665) Meeting & Filter analysis + 1H Chasing Cognito bug with Stijn + 1H call Mike chart components refresh + 1H Meeting Axel optimization & stability QC
167 15/05/25 1 1,5H MT-club (New life-update tool from Stijn + some coord & exchanges about Authorizer rewrite) + Bounce Lambda (SMEIMKT-7931) + Meeting bounce lambda & CDK & config Olivier (2H) + Bus packet update in DEV (modify httpGW & Midas & test) + Worked on Bounce Lambda (read S3 OK, parsing OK, DSN lib KO…)
168 16/05/25 1 (récup 30/05)
169 17/05/25
170 18/05/25
171 19/05/25 1 Meeting 1H Activity/Contact management (SMEIMKT-7665) + Verify httpGW & Fix Midas for new "payload.data" format + testing + Diagnose & fix POD communication + Meeting 1H Fred & Steph token2 testing & fixing + 1H Meeting Steph on channel & payload patterns & security
172 20/05/25 1 2H Updated & deployed WSSGateway to allow redpill-mode publish & history + 1H Meeting Mike about implementation is Sparc of assets for datahubs + 5H re-implement DSN parsing (RFC3464) for bounce-lambda (SMEIMKT-7931)
173 21/05/25 1 Bounce lambda (SMEIMKT-7931) + 1H Meeting RAG + 1H Meeting Activity/Contact management (SMEIMKT-7665)
174 22/05/25 1 1.5H Meeting Contact search optimization and business needs (SMEIMKT-7665) + 2H Meeting MalleJack Inventory & TODO list for Authorizer migration + Bounce Lambda (dnsParser) (SMEIMKT-7931)
175 23/05/25 1 Meeting administrative Francoise + Meeting Mag. upgrade Mailing-service lambda to use new FFS security & npm package + Meeting Siggy about existing bounces + Bounce Lambda (dnsParser) (SMEIMKT-7931)
176 24/05/25
177 25/05/25
178 26/05/25 1 Bounce Lambda (SMEIMKT-7931) + 1H Activity/Contact management (SMEIMKT-7665) + 1H Meeting with Marc about QueryCanvas perfs
179 27/05/25 1 Bounce Lambda (SMEIMKT-7931) + 1H Meeting Budg Middle & FE
180 28/05/25 1 Bounce Lambda (SMEIMKT-7931) + 1H Activity/Contact management (SMEIMKT-7665) + 0.5H Meeting Stijn Bounce lambda CDK issues + 1H meeting Stijn openAPI documentation new tool + 1H Meeting Val (coord & CNS Prod mask) + Deploy httpGW, wssGW & Midas in TEST (2H) + Meeting Siggy synch secretMGR in prod for CNS
181 29/05/25 0
182 30/05/25 0 (à récupérer) Bounce Lambda (cleanup, test & prepare for DB for deploy) (SMEIMKT-7931) + exchanges with Mikelkel about budget (SMEIMKT-7375) + Check about bus deploiment of 28/05
183 31/05/25
184 01/06/25 17 11441 13843,61
185 02/06/25 1 1H Activity/Contact management (SMEIMKT-7665) + 4H Bounce Lambda (update, redeploy & test massmailing dashboard with bounces) (SMEIMKT-7931) + 2H Docs for Authorizer refactoring (SMEIMKT-8836) + 2H worked on contactMgr Lambda (SMEIMKT-7364) 5720,5 2402,61
186 03/06/25 1 1H Meeting Stijn about lambda-code issue, Bounce pipeline & Authorizer refacto (SMEIMKT-7931) + 1H Meeting Axel about contactMgr (stats on results + misc issues) (SMEIMKT-7364) + 2H contactMgr stats + Exchanges & testing + 2H Docs for Authorizer refactoring (SMEIMKT-8836) + 2H bounce lambda update & deploy (SMEIMKT-7931)
187 04/06/25 1 Bounce lambda testing on all old mails, update & deploy to prod + tried reimporting old bounces (SMEIMKT-7931)
188 05/06/25 1 (SMEIMKT-7364) Merged FE contactMgr branches + started Result analysis+ 1H worked on permissions lambda (SMEIMKT-8836) + 1H meeting Authorizer with MM (SMEIMKT-8836)
189 06/06/25 1 (SMEIMKT-7364) 6h FE & MT contactMgr Result analysis + pies & MT mods for counters + started QuickProbe + (SMEIMKT-8836) 2H Exchange & refexion with Stijn about dual-session api/myeic process (SMEIMKT-8836)
190 07/06/25
191 08/06/25
192 09/06/25 0
193 10/06/25 1 1H Activity/Contact management (SMEIMKT-7665) + 7H Midas update userRole plugin to kill session & notify FE (SMEIMKT-8256)
194 11/06/25 1 1,5H Meeting Activity/Contact management prepa meeting BAS (SMEIMKT-8785) + 4H Midas update userRole plugin to kill session & notify FE testing (SMEIMKT-8256) + FFS security review with Mag 2,5H (SMEIMKT-8886)
195 12/06/25 1 1,5H Meeting BAS (SMEIMKT-8785) + 6,5H Midas update userRole plugin to kill session & notify FE testing (SMEIMKT-8256)
196 13/06/25 0
197 14/06/25
198 15/06/25
199 16/06/25 1 5,5H Midas update userRole plugin to kill session & notify FE final testing + 1H debrief meeting BAS (SMEIMKT-8785) + 1,5H Meeting MM authorizer & next day deployment implications (SMEIMKT-8836)
200 17/06/25 1 2,5h Deploy latest bus changes (SMEIMKT-8546) + 3H Permissions Lambda bug with MM + 2,5H (SMEIMKT-7364) Adding Search Btn + Sample refresh + probe infos
201 18/06/25 1 1H Activity/Contact management (SMEIMKT-7665) + 4H (SMEIMKT-7364) Adding Search Btn + Sample refresh + probe infos + 3H prepare meeting Ax v& Marc “Make QueryCanvas production-ready” (SMEIMKT-8897) + meeting + document take-aways (SMEIMKT-8897)
202 19/06/25 1 2H Fixed mailing broken lambda (after Mag merge) & fix same in PROD + 1H MT-club + 2H Prepare authorizer deployment (sync PROD users + hot-fix Midas userRoles plugin) (SMEIMKT-8836) + 3H (SMEIMKT-7364) Change lambda to cope with arrays in results
203 20/06/25
204 21/06/25
205 22/06/25
206 23/06/25 1 2H Track & Fixed mailing N/A FE issue & hotfix in PROD + 5H Working in points identified in (SMEIMKT-8897) + 1H Meeting Mike synchro & planning about eventsMgt
207 24/06/25 1 1H Activity/Contact management (SMEIMKT-7665) + 7H Working on points identified in (SMEIMKT-8897)
208 25/06/25 1 2H [SMEIMKT-8315] Analysis: User stories creation for event management MVP + 2H help prepare deploy patched authorizer (SMEIMKT-8836) + 4H Working in points identified in - Better topx counters (SMEIMKT-8897)
209 26/06/25 0 (Internet Down)
210 27/06/25 1 2,5H help Jack on end-of-deploy patched authorizer (SMEIMKT-8836), DB cleanup & Kill old-Midas with Siggy + ,5H help Mag & Betty with CNS whitelist + 1H Help MM & Franck with Bypass issue + 4H Working in points identified in - Better topx counters (SMEIMKT-8897)
211 28/06/25
212 29/06/25
213 30/06/25 1 1H Activity/Contact management (SMEIMKT-7665) + Working in points identified in (SMEIMKT-8897)
214 01/07/25 1 2H prepa meeting testing + meeting (SMEIMKT-8909) + 1H Meeting Axel make QC prodready + Working in points identified in (SMEIMKT-8897) changed to vertical tabs 12 8076 9771,96
215 02/07/25 1 Removed Samples + improved probe with explorer SMEIMKT-8897 4038 1695,96
216 03/07/25 1 1H Activity/Contact management (SMEIMKT-7665) + improved probe with explorer SMEIMKT-8897 + Meeting Mag activity Mgr (FE &MT creation events) (SMEIMKT-8960)
217 04/07/25 0
218 05/07/25 0
219 06/07/25 0
220 07/07/25 0
221 08/07/25 0
222 09/07/25 0
223 10/07/25 0
224 11/07/25 0
225 12/07/25 0
226 13/07/25 0
227 14/07/25 0
228 15/07/25 0
229 16/07/25 1 1H Activity/Contact management meeting (SMEIMKT-7665) + Working in points identified + Meeting Axel API changes after 1st round of optimizations + More changes TODO: DATE field as criteria, as Graph + Arrays in probe browser + changed Roles=> dynamic (SMEIMKT-8897)
230 17/07/25 1 3H Meeting Stijn about Authenticator & MT libs & Backstage & K8S (SMEIMKT-8836) + ContactMgr changes : DATE field as criteria (SMEIMKT-8897) + 3H Meeting Activity Management follow-up (SMEIMKT-8960) + 2H DATE field as criteria, as Graph OK (SMEIMKT-8897)
231 18/07/25 0
232 19/07/25
233 20/07/25
234 21/07/25 0 EU-Survey: SMEIMKT-8093
235 22/07/25 1 4H Restart & State inventory / damage report on Sparcomatic (SMEIMKT-8909) + 4H Adapt QC Lambda to latest ORM version & modifs + finish latest points & fixes in ContactMgr (Probe bug, field names in Probe-browser, Arrays) (SMEIMKT-8897)
236 23/07/25 1 1,25H Activity/Contact management meeting (SMEIMKT-7665) + 6,75H Merge latest develop changes back to contactMgr branch, compare & test + Start to fetch from QC for real into Mailer + refacto massmailing lambda to latest MT standards (SMEIMKT-8897)
237 24/07/25 1 4H Debug Refacto massmailing lambda (SMEIMKT-9022) + import QC in massmailing + 1,5H Meeting Data import to ML for the event and beyond (SMEIMKT-8671) + In contactMgr: put dynamic roles (SMEIMKT-8897)
238 25/07/25 0
239 26/07/25
240 27/07/25
241 28/07/25 1 1H Activity/Contact management meeting (SMEIMKT-7665) + 2H Fixed filter counters bug in Mass-mailing (SMEIMKT-9009) + 1H Meeting about TESTING with Betty & Mikelkel (SMEIMKT-8909) + 1H Packing VM to give to B.(SMEIMKT-8909) + Investigate existing MT model & DB structure to make new imports (SMEIMKT-9023)
242 29/07/25 1 2H sync meeting Mike FE archi. & components (SMEIMKT-8532) + 1H Onboarding Betty: giving access to sparcomatic VM + 4H ContactMgr make Fetch work with imports (MT + FE) (SMEIMKT-9023) + 1H Fix bug SMEIMKT-9015
243 30/07/25 1 1H Activity/Contact management meeting (SMEIMKT-7665) + 1,5H Investigate Ticket SMEIMKT-9016 +,5H Meeting Romain doc Mailing + 5H ContactMgr make Fetch work with imports (fix hints, add name of query, fix UI, started query refresh & edit (SMEIMKT-9023)
244 31/07/25 1 8H ContactMgr make Fetch work with imports: Edit + Refresh query (SMEIMKT-9023)
245 01/08/25 0 16 10768 13029,28
246 02/08/25 5384 2261,28
247 03/08/25
248 04/08/25 1 0,5H SMEIMKT-7986 + 1H Activity/Contact management meeting (SMEIMKT-8513) + 2,5H Help Siggy setup sparcomatic FE (SMEIMKT-9020) + 4H Refresh query : Change according to meeting – Warning with diffs before (SMEIMKT-9023)
249 05/08/25 1 1H Help Siggy setup sparcomatic FE (SMEIMKT-9020) + 7H Exclusion lists (SMEIMKT-9028)
250 06/08/25 1 1H Activity/Contact management meeting (SMEIMKT-8513) + 0,5H Help Siggy setup sparcomatic FE + 0,5H Meeting QA Status to show ongoing changes (SMEIMKT-8939)+ 6H Exclusion lists (SMEIMKT-9028)
251 07/08/25 1 Debug Dialogs issue (4h) +1H Exclusion lists (SMEIMKT-9028) + 3H Help Betty understand SPARC login & debug + setting up Github & sync code (SMEIMKT-8909)
252 08/08/25 0
253 09/08/25
254 10/08/25
255 11/08/25 1 1H Activity/Contact management meeting (SMEIMKT-8513) + 6H Exclusion lists (SMEIMKT-9028) + 1H Call Siggy to get proper access to Backstage (lib creation) EICINFRA-1405
256 12/08/25 1 2H Bounce emails PROD testing + import (smeimkt-8656) + 6H Transform mailingsModel in a lib, update mailing service & scheduled lambda + change for exclusions (SMEIMKT-9028)
257 13/08/25 1 1H review & update mail sending chain for change for exclusions (SMEIMKT-9028) + 2,5H Activity/Contact management meeting + analysis of bounce behaviors (SMEIMKT-8513) + 4,5H Group work on old bounce issues + Update old bounce lambda
258 14/08/25 1 4H Verify bounce end-to-end in PROD + 2H meeting & coding with Siggy to cleanup & recuperate old bounces + 2H Document bounces discussion
259 15/08/25 0
260 16/08/25
261 17/08/25
262 18/08/25 1 1H Activity/Contact management meeting (SMEIMKT-8513) + 1,5H Meeting about documentation + 1H Trials with Siggy about Swagger generation + 0,5H Fix Sparcomatic install + 2H Check & fix Sparcomatic deploy + 2H Bounce archi documentation & DB structure
263 19/08/25 1 2H Fix Sparcomatic install + 1h Meeting Betty + 1h Meeting Siggy about old bounces + Implementing & deploying blacklist into bounce lambda + MT rights in PROD cleanup with Mag + 1h fix schedule bug (SMEIMKT-8915)
264 20/08/25 0,5 1H Activity/Contact management meeting (SMEIMKT-8513) + 2H Meeting Stijn about backstage & Sparcomatic domain CORS issue + 0,5 Testing Sparcomatic (SMEIMKT-9030) + 0,5H Meeting Siggy old bounces
265 21/08/25 1 1H Meeting Siggy old bounces + 1H MeetingBetty Sparcomatic + 2H Fixing Sparcomatic for ECAS (SMEIMKT-9030) + 2H Meeting Backstage (SMEIMKT-8244) + 2H Debug with Steph dissemination + bus in TEST
266 22/08/25 0
267 23/08/25
268 24/08/25
269 25/08/25 1 1H Activity/Contact management meeting (SMEIMKT-8513) + Blacklist in CNS Notifier + testing (5H) + Finish refacto of mailing-service
270 26/08/25 1 0,5H Help Betty with ECAS issue + 2H Cleaning up deployments and merges for bounce-service, ope-cns-notifier and mailing-service + 1,5H meeting Siggy about Gitlab and Backstage access issues + 4H Create structure (DB & pieces of code) for activlinks (hops) (smeimkt-9078)
271 27/08/25 1 1H Activity/Contact management meeting (SMEIMKT-8513) + 0,5H meeting Val about compliance + 1H structure change in templates & access FFS with Mikelkel + 0,5H tests for Stijn about GL & BS issues + 1H DB structure for hops + 4H Meeting about GL workflows with Siggy +
272 28/08/25 1 1H Meeting StijnAuthorizer (EICINFRA-1408) + 1H Meeting Romain& Mikelkel about template paths & variables + 2H modify SPARC Core for better external redirections. (EICINFRA-1408) + 1H FFS users in ACCP SMEIMKT-9062 +3H token research & coding for hops
273 29/08/25 0,5 4h Lambda cookie cutter rework => multi templates + ML lambda new style (SMEIMKT-9077)
274 30/08/25
275 31/08/25
276 01/09/25 0 16 10768 13029,28 SMEIMKT-7375 coord
277 02/09/25 0 5384 2261,28 SMEIMKT-5295 Team Calls
278 03/09/25 0 SMEIMKT-502 user guides, onboarding
279 04/09/25 0 SMEIMKT4956 Recurrent operational tasks
280 05/09/25 0 Onboarding: EPIC SMEIMKT-502
281 06/09/25 Training Magali: SMEIMKT-7742 (FE) + SMEIMKT-7874 (MT)
282 07/09/25
283 08/09/25 1 1H Activity/Contact management meeting (SMEIMKT-8513) + 5h Chasing mailing crash in TEST (SMEIMKT-9085) + 2H New Middletier template for cookie cutter(SMEIMKT-9077)
284 09/09/25 1 1h meeting Stijn about mt-sessionRead, & SSM params (SMEIMKT-9085) + 1h meetings Romain & Mikelkel about ACCP MFA ( SMEIMKT-9138) + 1H meeting MM about mt issues (SMEIMKT-8244) + 2H improved mutils with logAllRejections (SMEIMKT-8244) + 3H Start writing lambda for activlinks (hops) (SMEIMKT-9078)
285 10/09/25 1 1H Activity/Contact management meeting (SMEIMKT-8513) + 7H writing lambda for activlinks (hops) (SMEIMKT-9078)
286 11/09/25 1 1H mailing & tpl access to users in prod (SMEIMKT-9091) + 2H published mtUtils 1.7.2 with logAllRejections after long fight with bundler (SMEIMKT-8244) + 1H Meeting Mikelkel about mailing watermarking recipient tracing (SMEIMKT-9078) + 4H hopper lambda & modif cns-notifier & scheduler for recipient ID tracing (SMEIMKT-9078)
287 12/09/25 0
288 13/09/25
289 14/09/25
290 15/09/25 1 1H Activity/Contact management meeting (SMEIMKT-8513) + 0.5H put a robots.txt in each environment (SMEIMKT-6434) + 6.5H Testing hopper stats & adapting token (Meeting W/ Axel & Franck about ML recipient IDs) (SMEIMKT-9078)
291 16/09/25 1 0.5H Meeting Mike Synchro (SMEIMKT-5295) + 4h mtMailUrlToken lib (SMEIMKT-9078) + 3.5H Finished testing WM workflow started documenting Watermarking process (SMEIMKT-9078)
292 17/09/25 1 8h Hopper & CNS-lambda (for hopper & Meeting Mag about templitor tags & behavior for watermarking & Document & redesign Watermarking process (SMEIMKT-9078)
293 18/09/25 1 3h Document & redesign Watermarking process (SMEIMKT-9078) + 2h Meeting Betty (& Stijn) to fix status issue in mailings (SMEIMKT-9118) + 3h Pair prog with Mag to set templitor lambda to latest standards (SMEIMKT-8161)
294 19/09/25 1 8H Finished documenting after refactory, Recoded CNS-notifier with html parsing (first jsDOM, then converted to linkedom), added anonymous RCP, improved token lib, adapted hopper lambda, tested not OK (SMEIMKT-9078)
295 20/09/25
296 21/09/25
297 22/09/25 1 6H Debugged CNS-notifier & hopper, cleanup & deploy (SMEIMKT-9078) + 2H CSV crashes SMEIMKT-9124
298 23/09/25 1 2H Proper release of CNS & Hopper & mtMailingModel with all latest changes & bugfixes in DEV (SMEIMKT-9124) + 2H Meeting Mike about bug in core SMEIMKT-8915 + 3H started Mailing statistics FE side (SMEIMKT-9124) + 1H Meeting Mag. about Activity/Contact management Babystep (SMEIMKT-9137)
299 24/09/25 1 3H Mailing statistics FE side (SMEIMKT-9124) + 1H Activity/Contact management meeting (SMEIMKT-8513) + 2.5H Meeting Steph, Manu, Mag bout API for baby-step (SMEIMKT-9137) + 1H Bug mailing Request review button doesn't hide (SMEIMKT-9125) + 0.5H Mailing statistics FE side (SMEIMKT-9124)
300 25/09/25 1 4H Schedule bug (SMEIMKT-8915) + 1H Meeting Betty (Sparcomatic & release process) (SMEIMKT-9138) + 1H Meeting Structure templitor & release process (SMEIMKT-9138) + 1H Meeting Baby-step Activities API (SMEIMKT-9137) + 1H Mailing statistics FE side (single route for different views:1st unclean solution) (SMEIMKT-9124)
301 26/09/25 1 2.5H Meeting about release process (SMEIMKT-9138) +1.5H Mailing statistics FE side (SMEIMKT-9124) + 0.5H analyse Mikelkells release workflow (SMEIMKT-9138) + 0.5H Call Mag. about watermarking (SMEIMKT-9078) + 3H Mailing statistics FE side (single route for different views: solved) (SMEIMKT-9124)
302 27/09/25
303 28/09/25
304 29/09/25 1 1H Activity/Contact management meeting (SMEIMKT-8513) + 0.5H Meeting Siggy about stabilization Release (SMEIMKT-9138) + 2.5H Timezone display SMEIMKT-9136 + 1H Meeting release process (SMEIMKT-9138) + 1.75H Meeting Releases (SMEIMKT-9138) + 1.25H Mailing statistics FE side (SMEIMKT-9124)
305 30/09/25 1
306 01/10/25
307 02/10/25
308 03/10/25
309 04/10/25
310 05/10/25
311 06/10/25
312 07/10/25
313 08/10/25
314 09/10/25
315 10/10/25
316 11/10/25
317 12/10/25
318 13/10/25
319 14/10/25
320 15/10/25
321 16/10/25
322 17/10/25
323 18/10/25
324 19/10/25
325 20/10/25
326 21/10/25
327 22/10/25
328 23/10/25
329 24/10/25
330 25/10/25
331 26/10/25
332 27/10/25
333 28/10/25
334 29/10/25
335 30/10/25
336 31/10/25
337 01/11/25
338 02/11/25
339 03/11/25
340 04/11/25
341 05/11/25
342 06/11/25
343 07/11/25
344 08/11/25
345 09/11/25
346 10/11/25
347 11/11/25
348 12/11/25
349 13/11/25
350 14/11/25
351 15/11/25
352 16/11/25
353 17/11/25
354 18/11/25
355 19/11/25
356 20/11/25
357 21/11/25
358 22/11/25
359 23/11/25
360 24/11/25
361 25/11/25
362 26/11/25
363 27/11/25
364 28/11/25
365 29/11/25
366 30/11/25
367 01/12/25
368 02/12/25
369 03/12/25
370 04/12/25
371 05/12/25
372 06/12/25
373 07/12/25
374 08/12/25
375 09/12/25

View File

@@ -0,0 +1,314 @@
[
{
"Date": "08/09/25",
"TaskCode": "SMEIMKT-8513",
"Duration": 1,
"Description": "1H Activity/Contact management meeting (SMEIMKT-8513)"
},
{
"Date": "08/09/25",
"TaskCode": "SMEIMKT-9085",
"Duration": 5,
"Description": "5h Chasing mailing crash in TEST (SMEIMKT-9085)"
},
{
"Date": "08/09/25",
"TaskCode": "SMEIMKT-9077",
"Duration": 2,
"Description": "2H New Middletier template for cookie cutter(SMEIMKT-9077)"
},
{
"Date": "09/09/25",
"TaskCode": "SMEIMKT-9085",
"Duration": 1,
"Description": "1h meeting Stijn about mt-sessionRead, & SSM params (SMEIMKT-9085)"
},
{
"Date": "09/09/25",
"TaskCode": "SMEIMKT-9138",
"Duration": 1,
"Description": "1h meetings Romain & Mikelkel about ACCP MFA ( SMEIMKT-9138)"
},
{
"Date": "09/09/25",
"TaskCode": "SMEIMKT-8244",
"Duration": 1,
"Description": "1H meeting MM about mt issues (SMEIMKT-8244)"
},
{
"Date": "09/09/25",
"TaskCode": "SMEIMKT-8244",
"Duration": 2,
"Description": "2H improved mutils with logAllRejections (SMEIMKT-8244)"
},
{
"Date": "09/09/25",
"TaskCode": "SMEIMKT-9078",
"Duration": 3,
"Description": "3H Start writing lambda for activlinks (hops) (SMEIMKT-9078)"
},
{
"Date": "10/09/25",
"TaskCode": "SMEIMKT-8513",
"Duration": 1,
"Description": "1H Activity/Contact management meeting (SMEIMKT-8513)"
},
{
"Date": "10/09/25",
"TaskCode": "SMEIMKT-9078",
"Duration": 7,
"Description": "7H writing lambda for activlinks (hops) (SMEIMKT-9078)"
},
{
"Date": "11/09/25",
"TaskCode": "SMEIMKT-9091",
"Duration": 1,
"Description": "1H mailing & tpl access to users in prod (SMEIMKT-9091)"
},
{
"Date": "11/09/25",
"TaskCode": "SMEIMKT-8244",
"Duration": 2,
"Description": "2H published mtUtils 1.7.2 with logAllRejections after long fight with bundler (SMEIMKT-8244)"
},
{
"Date": "11/09/25",
"TaskCode": "SMEIMKT-9078",
"Duration": 1,
"Description": "1H Meeting Mikelkel about mailing watermarking recipient tracing (SMEIMKT-9078)"
},
{
"Date": "11/09/25",
"TaskCode": "SMEIMKT-9078",
"Duration": 4,
"Description": "4H hopper lambda & modif cns-notifier & scheduler for recipient ID tracing (SMEIMKT-9078)"
},
{
"Date": "15/09/25",
"TaskCode": "SMEIMKT-8513",
"Duration": 1,
"Description": "1H Activity/Contact management meeting (SMEIMKT-8513)"
},
{
"Date": "15/09/25",
"TaskCode": "SMEIMKT-6434",
"Duration": 0.5,
"Description": "0.5H put a robots.txt in each environment (SMEIMKT-6434)"
},
{
"Date": "15/09/25",
"TaskCode": "SMEIMKT-9078",
"Duration": 6.5,
"Description": "6.5H Testing hopper stats & adapting token (Meeting W/ Axel & Franck about ML recipient IDs) (SMEIMKT-9078)"
},
{
"Date": "16/09/25",
"TaskCode": "SMEIMKT-5295",
"Duration": 0.5,
"Description": "0.5H Meeting Mike Synchro (SMEIMKT-5295)"
},
{
"Date": "16/09/25",
"TaskCode": "SMEIMKT-9078",
"Duration": 4,
"Description": "4h mtMailUrlToken lib (SMEIMKT-9078)"
},
{
"Date": "16/09/25",
"TaskCode": "SMEIMKT-9078",
"Duration": 3.5,
"Description": "3.5H Finished testing WM workflow started documenting Watermarking process (SMEIMKT-9078)"
},
{
"Date": "17/09/25",
"TaskCode": "SMEIMKT-9078",
"Duration": 8,
"Description": "8h Hopper & CNS-lambda (for hopper & Meeting Mag about templitor tags & behavior for watermarking & Document & redesign Watermarking process (SMEIMKT-9078)"
},
{
"Date": "18/09/25",
"TaskCode": "SMEIMKT-9078",
"Duration": 3,
"Description": "3h Document & redesign Watermarking process (SMEIMKT-9078)"
},
{
"Date": "18/09/25",
"TaskCode": "SMEIMKT-9118",
"Duration": 2,
"Description": "2h Meeting Betty (& Stijn) to fix status issue in mailings (SMEIMKT-9118)"
},
{
"Date": "18/09/25",
"TaskCode": "SMEIMKT-8161",
"Duration": 3,
"Description": "3h Pair prog with Mag to set templitor lambda to latest standards (SMEIMKT-8161)"
},
{
"Date": "19/09/25",
"TaskCode": "SMEIMKT-9078",
"Duration": 8,
"Description": "8H Finished documenting after refactory, Recoded CNS-notifier with html parsing (first jsDOM, then converted to linkedom), added anonymous RCP, improved token lib, adapted hopper lambda, tested not OK (SMEIMKT-9078)"
},
{
"Date": "22/09/25",
"TaskCode": "SMEIMKT-9078",
"Duration": 6,
"Description": "6H Debugged CNS-notifier & hopper, cleanup & deploy (SMEIMKT-9078)"
},
{
"Date": "22/09/25",
"TaskCode": "SMEIMKT-9124",
"Duration": 2,
"Description": "2H CSV crashes SMEIMKT-9124"
},
{
"Date": "23/09/25",
"TaskCode": "SMEIMKT-9124",
"Duration": 2,
"Description": "2H Proper release of CNS & Hopper & mtMailingModel with all latest changes & bugfixes in DEV (SMEIMKT-9124)"
},
{
"Date": "23/09/25",
"TaskCode": "SMEIMKT-8915",
"Duration": 2,
"Description": "2H Meeting Mike about bug in core SMEIMKT-8915"
},
{
"Date": "23/09/25",
"TaskCode": "SMEIMKT-9124",
"Duration": 3,
"Description": "3H started Mailing statistics FE side (SMEIMKT-9124)"
},
{
"Date": "23/09/25",
"TaskCode": "SMEIMKT-9137",
"Duration": 1,
"Description": "1H Meeting Mag. about Activity/Contact management Babystep (SMEIMKT-9137)"
},
{
"Date": "24/09/25",
"TaskCode": "SMEIMKT-9124",
"Duration": 3,
"Description": "3H Mailing statistics FE side (SMEIMKT-9124)"
},
{
"Date": "24/09/25",
"TaskCode": "SMEIMKT-8513",
"Duration": 1,
"Description": "1H Activity/Contact management meeting (SMEIMKT-8513)"
},
{
"Date": "24/09/25",
"TaskCode": "SMEIMKT-9137",
"Duration": 2.5,
"Description": "2.5H Meeting Steph, Manu, Mag bout API for baby-step (SMEIMKT-9137)"
},
{
"Date": "24/09/25",
"TaskCode": "SMEIMKT-9125",
"Duration": 1,
"Description": "1H Bug mailing Request review button doesn't hide (SMEIMKT-9125)"
},
{
"Date": "24/09/25",
"TaskCode": "SMEIMKT-9124",
"Duration": 0.5,
"Description": "0.5H Mailing statistics FE side (SMEIMKT-9124)"
},
{
"Date": "25/09/25",
"TaskCode": "SMEIMKT-8915",
"Duration": 4,
"Description": "4H Schedule bug (SMEIMKT-8915)"
},
{
"Date": "25/09/25",
"TaskCode": "SMEIMKT-9138",
"Duration": 1,
"Description": "1H Meeting Betty (Sparcomatic & release process) (SMEIMKT-9138)"
},
{
"Date": "25/09/25",
"TaskCode": "SMEIMKT-9138",
"Duration": 1,
"Description": "1H Meeting Structure templitor & release process (SMEIMKT-9138)"
},
{
"Date": "25/09/25",
"TaskCode": "SMEIMKT-9137",
"Duration": 1,
"Description": "1H Meeting Baby-step Activities API (SMEIMKT-9137)"
},
{
"Date": "25/09/25",
"TaskCode": "SMEIMKT-9124",
"Duration": 1,
"Description": "1H Mailing statistics FE side (single route for different views:1st unclean solution) (SMEIMKT-9124)"
},
{
"Date": "26/09/25",
"TaskCode": "SMEIMKT-9138",
"Duration": 2.5,
"Description": "2.5H Meeting about release process (SMEIMKT-9138)"
},
{
"Date": "26/09/25",
"TaskCode": "SMEIMKT-9124",
"Duration": 1.5,
"Description": "1.5H Mailing statistics FE side (SMEIMKT-9124)"
},
{
"Date": "26/09/25",
"TaskCode": "SMEIMKT-9138",
"Duration": 0.5,
"Description": "0.5H analyse Mikelkells release workflow (SMEIMKT-9138)"
},
{
"Date": "26/09/25",
"TaskCode": "SMEIMKT-9078",
"Duration": 0.5,
"Description": "0.5H Call Mag. about watermarking (SMEIMKT-9078)"
},
{
"Date": "26/09/25",
"TaskCode": "SMEIMKT-9124",
"Duration": 3,
"Description": "3H Mailing statistics FE side (single route for different views: solved) (SMEIMKT-9124)"
},
{
"Date": "29/09/25",
"TaskCode": "SMEIMKT-8513",
"Duration": 1,
"Description": "1H Activity/Contact management meeting (SMEIMKT-8513)"
},
{
"Date": "29/09/25",
"TaskCode": "SMEIMKT-9138",
"Duration": 0.5,
"Description": "0.5H Meeting Siggy about stabilization Release (SMEIMKT-9138)"
},
{
"Date": "29/09/25",
"TaskCode": "SMEIMKT-9136",
"Duration": 2.5,
"Description": "2.5H Timezone display SMEIMKT-9136"
},
{
"Date": "29/09/25",
"TaskCode": "SMEIMKT-9138",
"Duration": 1,
"Description": "1H Meeting release process (SMEIMKT-9138)"
},
{
"Date": "29/09/25",
"TaskCode": "SMEIMKT-9138",
"Duration": 1.75,
"Description": "1.75H Meeting Releases (SMEIMKT-9138)"
},
{
"Date": "29/09/25",
"TaskCode": "SMEIMKT-9124",
"Duration": 1.25,
"Description": "1.25H Mailing statistics FE side (SMEIMKT-9124)"
}
]

122
Jira_helper/converter.js Normal file
View File

@@ -0,0 +1,122 @@
import fs from 'fs'
import yargs from 'yargs/yargs'
import { hideBin } from 'yargs/helpers'
import { parse } from 'csv-parse/sync'

// CLI tool: validates a timesheet CSV (one row per day; column 3 holds
// "+"-separated task descriptions such as "0,5H Meeting X (SMEIMKT-1234)")
// and, when every entry parses and every day totals 8H or 4H, exports the
// selected month as a Jiracula JSON import file next to the input.
const argv = yargs(hideBin(process.argv)).command('Converter', 'Checks CSV for missing stuff, then converts CSV to Jiracula import file.', {})
  .options({
    'file': {
      description: 'CSV file',
      alias: 'f',
      type: 'string',
      demandOption: true,
    },
    'month': {
      description: 'Month number to extract',
      alias: 'm',
      type: 'number',
      demandOption: true,
    }
  }).help().version('1.0').argv

// ANSI color codes for console feedback; `reset` restores the terminal state.
const colors = {
  red: "\x1b[31m",
  green: "\x1b[32m",
  yellow: "\x1b[33m",
  blue: "\x1b[34m",
  magenta: "\x1b[35m",
  cyan: "\x1b[36m",
  bold: "\x1b[1m",
  reset: "\x1b[0m"
}

// Duration: "1H", "0.5h" and "0,5H" — the sheet uses comma decimals.
const durationPattern = /(\d+(?:[.,]\d+)?)\s*(h|H)/
// Task code, sometimes written "SMEIMKT 1234" instead of "SMEIMKT-1234".
const codePattern = /(SMEIMKT[- ]?\d+)/

// Read CSV
if (!fs.existsSync(argv.file)) {
  console.error(`${colors.red}Cannot find this file !?${colors.reset}`)
  process.exit(1)
}
const csv = fs.readFileSync(argv.file, 'utf8')
const records = parse(csv, {
  columns: false,
  skip_empty_lines: true
})

// Skip header row.
const [header, ...rows] = records

// Transform: one result entry per "+"-separated task in the Actions column.
const results = []
for (const row of rows) {
  if (row.length < 3) continue
  const date = row[0]
  const tasksStr = row[2]
  if (!tasksStr) continue
  // Dates are DD/MM/YY; keep only rows of the requested month.
  if (date.substring(3, 5) !== String(argv.month).padStart(2, '0')) continue
  const tasks = tasksStr.split('+').map(t => t.trim())
  for (const t of tasks) {
    const durationMatch = durationPattern.exec(t)
    // Normalize the comma decimal ("0,5" -> "0.5") before parseFloat,
    // which otherwise stops at the comma and returns 0 (a false error).
    const duration = durationMatch ? parseFloat(durationMatch[1].replace(',', '.')) : 0
    const codeMatch = codePattern.exec(t)
    const code = codeMatch ? codeMatch[1].replace(' ', '-') : null
    results.push({
      Date: date.trim(),
      TaskCode: code,
      Duration: duration || 0,
      Description: t
    })
  }
}

// Report each entry, count parsing errors, and sum hours per day.
let errs = 0
let tot = 0
const timePerDate = {}
results.forEach(r => {
  if (!r.TaskCode) {
    errs++
  } else if (!r.Duration) {
    errs++
  } else {
    // Only fully-parsed entries count toward the daily-hours check.
    if (r.Date in timePerDate) timePerDate[r.Date] += r.Duration
    else timePerDate[r.Date] = r.Duration
  }
  console.log(
    `${colors.cyan}${r.Date} ` +
    `${(r.TaskCode) ? colors.yellow + r.TaskCode : colors.red + 'Missing Code'} ` +
    `${(r.Duration) ? colors.yellow + r.Duration : colors.red + 'Missing Duration'} ` +
    `${colors.green}${r.Description}`
  )
  tot++
})
if (errs > 0) {
  console.log(`\n${colors.red}${errs} parsing errors on ${tot} tasks.${colors.green}\n`)
} else {
  console.log(`\n${colors.green}${tot} task entries, no parsing errors.\n`)
}

// Sanity check: each worked day must total a full day (8H) or half day (4H).
const badDailyHours = {}
let billable = 0
for (const dat in timePerDate) {
  if ((timePerDate[dat] !== 8) && (timePerDate[dat] !== 4)) badDailyHours[dat] = timePerDate[dat]
  else billable += (timePerDate[dat] / 8)
}
if (Object.keys(badDailyHours).length > 0) {
  console.log(`${colors.red}Some dates don't have 8H...\n`)
  for (const dat in badDailyHours) {
    console.log(`${colors.red} ${dat}: ${badDailyHours[dat]}H`)
  }
  console.log(colors.reset)
} else {
  console.log(`\n${colors.green}Ready to export to Jiracula !`)
  console.log(`Total billable days : ${billable}${colors.reset}\n`)
  // Write <input-basename>.jrcl.json alongside the input file.
  const outfile = argv.file.substring(0, argv.file.lastIndexOf('.')) + '.jrcl.json'
  fs.writeFileSync(outfile, JSON.stringify(results, null, 2), 'utf8')
}

171
Jira_helper/node_modules/.package-lock.json generated vendored Normal file
View File

@@ -0,0 +1,171 @@
{
"name": "Jira_helper",
"lockfileVersion": 3,
"requires": true,
"packages": {
"node_modules/ansi-regex": {
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
"integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-regex?sponsor=1"
}
},
"node_modules/ansi-styles": {
"version": "6.2.3",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
"integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/cliui": {
"version": "9.0.1",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-9.0.1.tgz",
"integrity": "sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w==",
"license": "ISC",
"dependencies": {
"string-width": "^7.2.0",
"strip-ansi": "^7.1.0",
"wrap-ansi": "^9.0.0"
},
"engines": {
"node": ">=20"
}
},
"node_modules/csv-parse": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-6.1.0.tgz",
"integrity": "sha512-CEE+jwpgLn+MmtCpVcPtiCZpVtB6Z2OKPTr34pycYYoL7sxdOkXDdQ4lRiw6ioC0q6BLqhc6cKweCVvral8yhw==",
"license": "MIT"
},
"node_modules/emoji-regex": {
"version": "10.5.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.5.0.tgz",
"integrity": "sha512-lb49vf1Xzfx080OKA0o6l8DQQpV+6Vg95zyCJX9VB/BqKYlhG7N4wgROUUHRA+ZPUefLnteQOad7z1kT2bV7bg==",
"license": "MIT"
},
"node_modules/escalade": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
"integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/get-caller-file": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
"license": "ISC",
"engines": {
"node": "6.* || 8.* || >= 10.*"
}
},
"node_modules/get-east-asian-width": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz",
"integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==",
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/string-width": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
"integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"license": "MIT",
"dependencies": {
"emoji-regex": "^10.3.0",
"get-east-asian-width": "^1.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/strip-ansi": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
"integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
"license": "MIT",
"dependencies": {
"ansi-regex": "^6.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
"node_modules/wrap-ansi": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
"integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==",
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"string-width": "^7.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/y18n": {
"version": "5.0.8",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
"integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
"license": "ISC",
"engines": {
"node": ">=10"
}
},
"node_modules/yargs": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-18.0.0.tgz",
"integrity": "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg==",
"license": "MIT",
"dependencies": {
"cliui": "^9.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"string-width": "^7.2.0",
"y18n": "^5.0.5",
"yargs-parser": "^22.0.0"
},
"engines": {
"node": "^20.19.0 || ^22.12.0 || >=23"
}
},
"node_modules/yargs-parser": {
"version": "22.0.0",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-22.0.0.tgz",
"integrity": "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw==",
"license": "ISC",
"engines": {
"node": "^20.19.0 || ^22.12.0 || >=23"
}
}
}
}

33
Jira_helper/node_modules/ansi-regex/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,33 @@
export type Options = {
  /**
  Match only the first ANSI escape.

  Optional: the implementation defaults it to `false`, so callers may pass
  `{}` or omit the options object entirely.

  @default false
  */
  readonly onlyFirst?: boolean;
};

/**
Regular expression for matching ANSI escape codes.

@example
```
import ansiRegex from 'ansi-regex';

ansiRegex().test('\u001B[4mcake\u001B[0m');
//=> true

ansiRegex().test('cake');
//=> false

'\u001B[4mcake\u001B[0m'.match(ansiRegex());
//=> ['\u001B[4m', '\u001B[0m']

'\u001B[4mcake\u001B[0m'.match(ansiRegex({onlyFirst: true}));
//=> ['\u001B[4m']

'\u001B]8;;https://github.com\u0007click\u001B]8;;\u0007'.match(ansiRegex());
//=> ['\u001B]8;;https://github.com\u0007', '\u001B]8;;\u0007']
```
*/
export default function ansiRegex(options?: Options): RegExp;

14
Jira_helper/node_modules/ansi-regex/index.js generated vendored Normal file
View File

@@ -0,0 +1,14 @@
// NOTE(review): vendored from the `ansi-regex` npm package (MIT); keep the
// code byte-identical to upstream — comments only here.
/**
 * Build a RegExp that matches ANSI escape sequences (CSI color/cursor codes
 * and OSC sequences such as terminal hyperlinks).
 *
 * @param {object} [options]
 * @param {boolean} [options.onlyFirst=false] - When true the returned regex
 *   has no `g` flag, so matching stops at the first escape sequence.
 * @returns {RegExp}
 */
export default function ansiRegex({onlyFirst = false} = {}) {
  // Valid string terminator sequences are BEL, ESC\, and 0x9c
  const ST = '(?:\\u0007|\\u001B\\u005C|\\u009C)';
  // OSC sequences only: ESC ] ... ST (non-greedy until the first ST)
  const osc = `(?:\\u001B\\][\\s\\S]*?${ST})`;
  // CSI and related: ESC/C1, optional intermediates, optional params (supports ; and :) then final byte
  const csi = '[\\u001B\\u009B][[\\]()#;?]*(?:\\d{1,4}(?:[;:]\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]';
  const pattern = `${osc}|${csi}`;
  // Without the `g` flag, match()/exec() return only the first occurrence.
  return new RegExp(pattern, onlyFirst ? undefined : 'g');
}

9
Jira_helper/node_modules/ansi-regex/license generated vendored Normal file
View File

@@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

61
Jira_helper/node_modules/ansi-regex/package.json generated vendored Normal file
View File

@@ -0,0 +1,61 @@
{
"name": "ansi-regex",
"version": "6.2.2",
"description": "Regular expression for matching ANSI escape codes",
"license": "MIT",
"repository": "chalk/ansi-regex",
"funding": "https://github.com/chalk/ansi-regex?sponsor=1",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": "./index.js",
"types": "./index.d.ts",
"sideEffects": false,
"engines": {
"node": ">=12"
},
"scripts": {
"test": "xo && ava && tsd",
"view-supported": "node fixtures/view-codes.js"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"string",
"tty",
"escape",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"command-line",
"text",
"regex",
"regexp",
"re",
"match",
"test",
"find",
"pattern"
],
"devDependencies": {
"ansi-escapes": "^5.0.0",
"ava": "^3.15.0",
"tsd": "^0.21.0",
"xo": "^0.54.2"
}
}

66
Jira_helper/node_modules/ansi-regex/readme.md generated vendored Normal file
View File

@@ -0,0 +1,66 @@
# ansi-regex
> Regular expression for matching [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code)
## Install
```sh
npm install ansi-regex
```
## Usage
```js
import ansiRegex from 'ansi-regex';
ansiRegex().test('\u001B[4mcake\u001B[0m');
//=> true
ansiRegex().test('cake');
//=> false
'\u001B[4mcake\u001B[0m'.match(ansiRegex());
//=> ['\u001B[4m', '\u001B[0m']
'\u001B[4mcake\u001B[0m'.match(ansiRegex({onlyFirst: true}));
//=> ['\u001B[4m']
'\u001B]8;;https://github.com\u0007click\u001B]8;;\u0007'.match(ansiRegex());
//=> ['\u001B]8;;https://github.com\u0007', '\u001B]8;;\u0007']
```
## API
### ansiRegex(options?)
Returns a regex for matching ANSI escape codes.
#### options
Type: `object`
##### onlyFirst
Type: `boolean`\
Default: `false` *(Matches any ANSI escape codes in a string)*
Match only the first ANSI escape.
## Important
If you run the regex against untrusted user input in a server context, you should [give it a timeout](https://github.com/sindresorhus/super-regex).
**I do not consider [ReDoS](https://blog.yossarian.net/2022/12/28/ReDoS-vulnerabilities-and-misaligned-incentives) a valid vulnerability for this package.**
## FAQ
### Why do you test for codes not in the ECMA 48 standard?
Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. We test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them.
On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out.
## Maintainers
- [Sindre Sorhus](https://github.com/sindresorhus)
- [Josh Junon](https://github.com/qix-)

236
Jira_helper/node_modules/ansi-styles/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,236 @@
export type CSPair = { // eslint-disable-line @typescript-eslint/naming-convention
/**
The ANSI terminal control sequence for starting this style.
*/
readonly open: string;
/**
The ANSI terminal control sequence for ending this style.
*/
readonly close: string;
};
export type ColorBase = {
/**
The ANSI terminal control sequence for ending this color.
*/
readonly close: string;
ansi(code: number): string;
ansi256(code: number): string;
ansi16m(red: number, green: number, blue: number): string;
};
export type Modifier = {
/**
Resets the current color chain.
*/
readonly reset: CSPair;
/**
Make text bold.
*/
readonly bold: CSPair;
/**
Emitting only a small amount of light.
*/
readonly dim: CSPair;
/**
Make text italic. (Not widely supported)
*/
readonly italic: CSPair;
/**
Make text underline. (Not widely supported)
*/
readonly underline: CSPair;
/**
Make text overline.
Supported on VTE-based terminals, the GNOME terminal, mintty, and Git Bash.
*/
readonly overline: CSPair;
/**
Inverse background and foreground colors.
*/
readonly inverse: CSPair;
/**
Prints the text, but makes it invisible.
*/
readonly hidden: CSPair;
/**
Puts a horizontal line through the center of the text. (Not widely supported)
*/
readonly strikethrough: CSPair;
};
export type ForegroundColor = {
readonly black: CSPair;
readonly red: CSPair;
readonly green: CSPair;
readonly yellow: CSPair;
readonly blue: CSPair;
readonly cyan: CSPair;
readonly magenta: CSPair;
readonly white: CSPair;
/**
Alias for `blackBright`.
*/
readonly gray: CSPair;
/**
Alias for `blackBright`.
*/
readonly grey: CSPair;
readonly blackBright: CSPair;
readonly redBright: CSPair;
readonly greenBright: CSPair;
readonly yellowBright: CSPair;
readonly blueBright: CSPair;
readonly cyanBright: CSPair;
readonly magentaBright: CSPair;
readonly whiteBright: CSPair;
};
export type BackgroundColor = {
readonly bgBlack: CSPair;
readonly bgRed: CSPair;
readonly bgGreen: CSPair;
readonly bgYellow: CSPair;
readonly bgBlue: CSPair;
readonly bgCyan: CSPair;
readonly bgMagenta: CSPair;
readonly bgWhite: CSPair;
/**
Alias for `bgBlackBright`.
*/
readonly bgGray: CSPair;
/**
Alias for `bgBlackBright`.
*/
readonly bgGrey: CSPair;
readonly bgBlackBright: CSPair;
readonly bgRedBright: CSPair;
readonly bgGreenBright: CSPair;
readonly bgYellowBright: CSPair;
readonly bgBlueBright: CSPair;
readonly bgCyanBright: CSPair;
readonly bgMagentaBright: CSPair;
readonly bgWhiteBright: CSPair;
};
/**
Conversion helpers between the RGB, HEX, ANSI 256 and ANSI 16 color spaces.
*/
export type ConvertColor = {
	/**
	Convert from the RGB color space to the ANSI 256 color space.
	@param red - (`0...255`)
	@param green - (`0...255`)
	@param blue - (`0...255`)
	*/
	rgbToAnsi256(red: number, green: number, blue: number): number;
	/**
	Convert from the RGB HEX color space to the RGB color space.
	@param hex - A hexadecimal string containing RGB data.
	*/
	hexToRgb(hex: string): [red: number, green: number, blue: number];
	/**
	Convert from the RGB HEX color space to the ANSI 256 color space.
	@param hex - A hexadecimal string containing RGB data.
	*/
	hexToAnsi256(hex: string): number;
	/**
	Convert from the ANSI 256 color space to the ANSI 16 color space.
	@param code - A number representing the ANSI 256 color.
	*/
	ansi256ToAnsi(code: number): number;
	/**
	Convert from the RGB color space to the ANSI 16 color space.
	@param red - (`0...255`)
	@param green - (`0...255`)
	@param blue - (`0...255`)
	*/
	rgbToAnsi(red: number, green: number, blue: number): number;
	/**
	Convert from the RGB HEX color space to the ANSI 16 color space.
	@param hex - A hexadecimal string containing RGB data.
	*/
	hexToAnsi(hex: string): number;
};
/**
Basic modifier names.
*/
export type ModifierName = keyof Modifier;
/**
Basic foreground color names.
[More colors here.](https://github.com/chalk/chalk/blob/main/readme.md#256-and-truecolor-color-support)
*/
export type ForegroundColorName = keyof ForegroundColor;
/**
Basic background color names.
[More colors here.](https://github.com/chalk/chalk/blob/main/readme.md#256-and-truecolor-color-support)
*/
export type BackgroundColorName = keyof BackgroundColor;
/**
Basic color names. The combination of foreground and background color names.
[More colors here.](https://github.com/chalk/chalk/blob/main/readme.md#256-and-truecolor-color-support)
*/
export type ColorName = ForegroundColorName | BackgroundColorName;
/**
Basic modifier names.
*/
export const modifierNames: readonly ModifierName[];
/**
Basic foreground color names.
*/
export const foregroundColorNames: readonly ForegroundColorName[];
/**
Basic background color names.
*/
export const backgroundColorNames: readonly BackgroundColorName[];
/**
Basic color names. The combination of foreground and background color names.
*/
export const colorNames: readonly ColorName[];
/**
The default export: flat style map plus the non-enumerable `modifier`/`color`/`bgColor` groups, the `codes` open→close map, and the color-space conversion helpers.
*/
declare const ansiStyles: {
	readonly modifier: Modifier;
	readonly color: ColorBase & ForegroundColor;
	readonly bgColor: ColorBase & BackgroundColor;
	readonly codes: ReadonlyMap<number, number>;
} & ForegroundColor & BackgroundColor & Modifier & ConvertColor;
export default ansiStyles;

223
Jira_helper/node_modules/ansi-styles/index.js generated vendored Normal file
View File

@@ -0,0 +1,223 @@
// Background SGR codes sit exactly 10 above their foreground counterparts
// (e.g. foreground 38 → background 48).
const ANSI_BACKGROUND_OFFSET = 10;

// Builders for CSI escape sequences. Each factory takes an offset (0 for
// foreground, ANSI_BACKGROUND_OFFSET for background) and returns a function
// producing the escape string for a 16-color code, a 256-palette index, or
// a 24-bit RGB triple respectively.
const wrapAnsi16 = (offset = 0) => (code) => `\u001B[${offset + code}m`;
const wrapAnsi256 = (offset = 0) => (code) => `\u001B[${offset + 38};5;${code}m`;
const wrapAnsi16m = (offset = 0) => (red, green, blue) => `\u001B[${offset + 38};2;${red};${green};${blue}m`;
// Raw SGR code pairs, grouped by kind. Each entry is [openCode, closeCode];
// assembleStyles() later replaces these arrays with {open, close} escape
// strings and mirrors every entry onto the top-level object.
const styles = {
    modifier: {
        reset: [0, 0],
        // 21 isn't widely supported and 22 does the same thing
        bold: [1, 22],
        dim: [2, 22],
        italic: [3, 23],
        underline: [4, 24],
        overline: [53, 55],
        inverse: [7, 27],
        hidden: [8, 28],
        strikethrough: [9, 29],
    },
    color: {
        black: [30, 39],
        red: [31, 39],
        green: [32, 39],
        yellow: [33, 39],
        blue: [34, 39],
        magenta: [35, 39],
        cyan: [36, 39],
        white: [37, 39],
        // Bright color
        blackBright: [90, 39],
        gray: [90, 39], // Alias of `blackBright`
        grey: [90, 39], // Alias of `blackBright`
        redBright: [91, 39],
        greenBright: [92, 39],
        yellowBright: [93, 39],
        blueBright: [94, 39],
        magentaBright: [95, 39],
        cyanBright: [96, 39],
        whiteBright: [97, 39],
    },
    bgColor: {
        bgBlack: [40, 49],
        bgRed: [41, 49],
        bgGreen: [42, 49],
        bgYellow: [43, 49],
        bgBlue: [44, 49],
        bgMagenta: [45, 49],
        bgCyan: [46, 49],
        bgWhite: [47, 49],
        // Bright color
        bgBlackBright: [100, 49],
        bgGray: [100, 49], // Alias of `bgBlackBright`
        bgGrey: [100, 49], // Alias of `bgBlackBright`
        bgRedBright: [101, 49],
        bgGreenBright: [102, 49],
        bgYellowBright: [103, 49],
        bgBlueBright: [104, 49],
        bgMagentaBright: [105, 49],
        bgCyanBright: [106, 49],
        bgWhiteBright: [107, 49],
    },
};
// Style-name lists exported for input validation; captured here, before
// assembleStyles() turns the groups into non-enumerable properties.
export const modifierNames = Object.keys(styles.modifier);
export const foregroundColorNames = Object.keys(styles.color);
export const backgroundColorNames = Object.keys(styles.bgColor);
export const colorNames = [...foregroundColorNames, ...backgroundColorNames];
// Build the final export: converts every [open, close] code pair into
// {open, close} escape strings (mirrored on both the flat object and its
// group), attaches the non-enumerable group/codes properties, and installs
// the color-space conversion helpers.
function assembleStyles() {
    const codes = new Map();
    for (const [groupName, group] of Object.entries(styles)) {
        for (const [styleName, style] of Object.entries(group)) {
            // Same pair object is shared by styles.foo and styles.group.foo.
            styles[styleName] = {
                open: `\u001B[${style[0]}m`,
                close: `\u001B[${style[1]}m`,
            };
            group[styleName] = styles[styleName];
            codes.set(style[0], style[1]);
        }
        // Groups stay accessible but invisible to enumeration.
        Object.defineProperty(styles, groupName, {
            value: group,
            enumerable: false,
        });
    }
    Object.defineProperty(styles, 'codes', {
        value: codes,
        enumerable: false,
    });
    // Generic close sequences valid for any foreground/background color.
    styles.color.close = '\u001B[39m';
    styles.bgColor.close = '\u001B[49m';
    styles.color.ansi = wrapAnsi16();
    styles.color.ansi256 = wrapAnsi256();
    styles.color.ansi16m = wrapAnsi16m();
    styles.bgColor.ansi = wrapAnsi16(ANSI_BACKGROUND_OFFSET);
    styles.bgColor.ansi256 = wrapAnsi256(ANSI_BACKGROUND_OFFSET);
    styles.bgColor.ansi16m = wrapAnsi16m(ANSI_BACKGROUND_OFFSET);
    // From https://github.com/Qix-/color-convert/blob/3f0e0d4e92e235796ccb17f6e85c72094a651f49/conversions.js
    Object.defineProperties(styles, {
        rgbToAnsi256: {
            value(red, green, blue) {
                // We use the extended greyscale palette here, with the exception of
                // black and white. normal palette only has 4 greyscale shades.
                if (red === green && green === blue) {
                    if (red < 8) {
                        return 16;
                    }
                    if (red > 248) {
                        return 231;
                    }
                    return Math.round(((red - 8) / 247) * 24) + 232;
                }
                // 6x6x6 color cube starting at index 16.
                return 16
                    + (36 * Math.round(red / 255 * 5))
                    + (6 * Math.round(green / 255 * 5))
                    + Math.round(blue / 255 * 5);
            },
            enumerable: false,
        },
        hexToRgb: {
            value(hex) {
                // Accepts 3- or 6-digit hex, with or without a leading '#'.
                const matches = /[a-f\d]{6}|[a-f\d]{3}/i.exec(hex.toString(16));
                if (!matches) {
                    return [0, 0, 0];
                }
                let [colorString] = matches;
                if (colorString.length === 3) {
                    // Expand shorthand: 'abc' -> 'aabbcc'.
                    colorString = [...colorString].map(character => character + character).join('');
                }
                const integer = Number.parseInt(colorString, 16);
                return [
                    /* eslint-disable no-bitwise */
                    (integer >> 16) & 0xFF,
                    (integer >> 8) & 0xFF,
                    integer & 0xFF,
                    /* eslint-enable no-bitwise */
                ];
            },
            enumerable: false,
        },
        hexToAnsi256: {
            value: hex => styles.rgbToAnsi256(...styles.hexToRgb(hex)),
            enumerable: false,
        },
        ansi256ToAnsi: {
            value(code) {
                // 0-7: standard colors, 8-15: bright colors.
                if (code < 8) {
                    return 30 + code;
                }
                if (code < 16) {
                    return 90 + (code - 8);
                }
                let red;
                let green;
                let blue;
                if (code >= 232) {
                    // Greyscale ramp: map back to an equal RGB triple in 0..1.
                    red = (((code - 232) * 10) + 8) / 255;
                    green = red;
                    blue = red;
                } else {
                    // 6x6x6 cube: decompose the index into 0..1 channel values.
                    code -= 16;
                    const remainder = code % 36;
                    red = Math.floor(code / 36) / 5;
                    green = Math.floor(remainder / 6) / 5;
                    blue = (remainder % 6) / 5;
                }
                const value = Math.max(red, green, blue) * 2;
                if (value === 0) {
                    return 30;
                }
                // eslint-disable-next-line no-bitwise
                let result = 30 + ((Math.round(blue) << 2) | (Math.round(green) << 1) | Math.round(red));
                if (value === 2) {
                    // Bright variant.
                    result += 60;
                }
                return result;
            },
            enumerable: false,
        },
        rgbToAnsi: {
            value: (red, green, blue) => styles.ansi256ToAnsi(styles.rgbToAnsi256(red, green, blue)),
            enumerable: false,
        },
        hexToAnsi: {
            value: hex => styles.ansi256ToAnsi(styles.hexToAnsi256(hex)),
            enumerable: false,
        },
    });
    return styles;
}
const ansiStyles = assembleStyles();
export default ansiStyles;

9
Jira_helper/node_modules/ansi-styles/license generated vendored Normal file
View File

@@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

54
Jira_helper/node_modules/ansi-styles/package.json generated vendored Normal file
View File

@@ -0,0 +1,54 @@
{
"name": "ansi-styles",
"version": "6.2.3",
"description": "ANSI escape codes for styling strings in the terminal",
"license": "MIT",
"repository": "chalk/ansi-styles",
"funding": "https://github.com/chalk/ansi-styles?sponsor=1",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": "./index.js",
"engines": {
"node": ">=12"
},
"scripts": {
"test": "xo && ava && tsd",
"screenshot": "svg-term --command='node screenshot' --out=screenshot.svg --padding=3 --width=55 --height=3 --at=1000 --no-cursor"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"string",
"tty",
"escape",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"log",
"logging",
"command-line",
"text"
],
"devDependencies": {
"ava": "^6.1.3",
"svg-term-cli": "^2.1.1",
"tsd": "^0.31.1",
"xo": "^0.58.0"
}
}

173
Jira_helper/node_modules/ansi-styles/readme.md generated vendored Normal file
View File

@@ -0,0 +1,173 @@
# ansi-styles
> [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal
You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings.
![](screenshot.png)
## Install
```sh
npm install ansi-styles
```
## Usage
```js
import styles from 'ansi-styles';
console.log(`${styles.green.open}Hello world!${styles.green.close}`);
// Color conversion between 256/truecolor
// NOTE: When converting from truecolor to 256 colors, the original color
// may be degraded to fit the new color palette. This means terminals
// that do not support 16 million colors will best-match the
// original color.
console.log(`${styles.color.ansi(styles.rgbToAnsi(199, 20, 250))}Hello World${styles.color.close}`)
console.log(`${styles.color.ansi256(styles.rgbToAnsi256(199, 20, 250))}Hello World${styles.color.close}`)
console.log(`${styles.color.ansi16m(...styles.hexToRgb('#abcdef'))}Hello World${styles.color.close}`)
```
## API
### `open` and `close`
Each style has an `open` and `close` property.
### `modifierNames`, `foregroundColorNames`, `backgroundColorNames`, and `colorNames`
All supported style strings are exposed as an array of strings for convenience. `colorNames` is the combination of `foregroundColorNames` and `backgroundColorNames`.
This can be useful if you need to validate input:
```js
import {modifierNames, foregroundColorNames} from 'ansi-styles';
console.log(modifierNames.includes('bold'));
//=> true
console.log(foregroundColorNames.includes('pink'));
//=> false
```
## Styles
### Modifiers
- `reset`
- `bold`
- `dim`
- `italic` *(Not widely supported)*
- `underline`
- `overline` *Supported on VTE-based terminals, the GNOME terminal, mintty, and Git Bash.*
- `inverse`
- `hidden`
- `strikethrough` *(Not widely supported)*
### Colors
- `black`
- `red`
- `green`
- `yellow`
- `blue`
- `magenta`
- `cyan`
- `white`
- `blackBright` (alias: `gray`, `grey`)
- `redBright`
- `greenBright`
- `yellowBright`
- `blueBright`
- `magentaBright`
- `cyanBright`
- `whiteBright`
### Background colors
- `bgBlack`
- `bgRed`
- `bgGreen`
- `bgYellow`
- `bgBlue`
- `bgMagenta`
- `bgCyan`
- `bgWhite`
- `bgBlackBright` (alias: `bgGray`, `bgGrey`)
- `bgRedBright`
- `bgGreenBright`
- `bgYellowBright`
- `bgBlueBright`
- `bgMagentaBright`
- `bgCyanBright`
- `bgWhiteBright`
## Advanced usage
By default, you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module.
- `styles.modifier`
- `styles.color`
- `styles.bgColor`
###### Example
```js
import styles from 'ansi-styles';
console.log(styles.color.green.open);
```
Raw escape codes (i.e. without the CSI escape prefix `\u001B[` and render mode postfix `m`) are available under `styles.codes`, which returns a `Map` with the open codes as keys and close codes as values.
###### Example
```js
import styles from 'ansi-styles';
console.log(styles.codes.get(36));
//=> 39
```
## 16 / 256 / 16 million (TrueColor) support
`ansi-styles` allows converting between various color formats and ANSI escapes, with support for 16, 256 and [16 million colors](https://gist.github.com/XVilka/8346728).
The following color spaces are supported:
- `rgb`
- `hex`
- `ansi256`
- `ansi`
To use these, call the associated conversion function with the intended output, for example:
```js
import styles from 'ansi-styles';
styles.color.ansi(styles.rgbToAnsi(100, 200, 15)); // RGB to 16 color ansi foreground code
styles.bgColor.ansi(styles.hexToAnsi('#C0FFEE')); // HEX to 16 color ansi background code
styles.color.ansi256(styles.rgbToAnsi256(100, 200, 15)); // RGB to 256 color ansi foreground code
styles.bgColor.ansi256(styles.hexToAnsi256('#C0FFEE')); // HEX to 256 color ansi background code
styles.color.ansi16m(100, 200, 15); // RGB to 16 million color foreground code
styles.bgColor.ansi16m(...styles.hexToRgb('#C0FFEE')); // Hex (RGB) to 16 million color background code
```
## Related
- [ansi-escapes](https://github.com/sindresorhus/ansi-escapes) - ANSI escape codes for manipulating the terminal
## Maintainers
- [Sindre Sorhus](https://github.com/sindresorhus)
- [Josh Junon](https://github.com/qix-)
## For enterprise
Available as part of the Tidelift Subscription.
The maintainers of `ansi-styles` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-ansi-styles?utm_source=npm-ansi-styles&utm_medium=referral&utm_campaign=enterprise&utm_term=repo)

157
Jira_helper/node_modules/cliui/CHANGELOG.md generated vendored Normal file
View File

@@ -0,0 +1,157 @@
# Change Log
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
## [9.0.1](https://github.com/yargs/cliui/compare/v9.0.0...v9.0.1) (2025-03-17)
### Bug Fixes
* make require("cliui") work as expected for CJS ([04ccc25](https://github.com/yargs/cliui/commit/04ccc250e30a059292c03fa1ef0a8661f8d93dfe))
## [9.0.0](https://github.com/yargs/cliui/compare/v8.0.1...v9.0.0) (2025-03-16)
### ⚠ BREAKING CHANGES
* cliui is now ESM only ([#165](https://github.com/yargs/cliui/issues/165))
### Features
* cliui is now ESM only ([#165](https://github.com/yargs/cliui/issues/165)) ([5a521de](https://github.com/yargs/cliui/commit/5a521de7ea88f262236394c8d96775bcf50ff0a4))
## [8.0.1](https://github.com/yargs/cliui/compare/v8.0.0...v8.0.1) (2022-10-01)
### Bug Fixes
* **deps:** move rollup-plugin-ts to dev deps ([#124](https://github.com/yargs/cliui/issues/124)) ([7c8bd6b](https://github.com/yargs/cliui/commit/7c8bd6ba024d61e4eeae310c7959ab8ab6829081))
## [8.0.0](https://github.com/yargs/cliui/compare/v7.0.4...v8.0.0) (2022-09-30)
### ⚠ BREAKING CHANGES
* **deps:** drop Node 10 to release CVE-2021-3807 patch (#122)
### Bug Fixes
* **deps:** drop Node 10 to release CVE-2021-3807 patch ([#122](https://github.com/yargs/cliui/issues/122)) ([f156571](https://github.com/yargs/cliui/commit/f156571ce4f2ebf313335e3a53ad905589da5a30))
### [7.0.4](https://www.github.com/yargs/cliui/compare/v7.0.3...v7.0.4) (2020-11-08)
### Bug Fixes
* **deno:** import UIOptions from definitions ([#97](https://www.github.com/yargs/cliui/issues/97)) ([f04f343](https://www.github.com/yargs/cliui/commit/f04f3439bc78114c7e90f82ff56f5acf16268ea8))
### [7.0.3](https://www.github.com/yargs/cliui/compare/v7.0.2...v7.0.3) (2020-10-16)
### Bug Fixes
* **exports:** node 13.0 and 13.1 require the dotted object form _with_ a string fallback ([#93](https://www.github.com/yargs/cliui/issues/93)) ([eca16fc](https://www.github.com/yargs/cliui/commit/eca16fc05d26255df3280906c36d7f0e5b05c6e9))
### [7.0.2](https://www.github.com/yargs/cliui/compare/v7.0.1...v7.0.2) (2020-10-14)
### Bug Fixes
* **exports:** node 13.0-13.6 require a string fallback ([#91](https://www.github.com/yargs/cliui/issues/91)) ([b529d7e](https://www.github.com/yargs/cliui/commit/b529d7e432901af1af7848b23ed6cf634497d961))
### [7.0.1](https://www.github.com/yargs/cliui/compare/v7.0.0...v7.0.1) (2020-08-16)
### Bug Fixes
* **build:** main should be build/index.cjs ([dc29a3c](https://www.github.com/yargs/cliui/commit/dc29a3cc617a410aa850e06337b5954b04f2cb4d))
## [7.0.0](https://www.github.com/yargs/cliui/compare/v6.0.0...v7.0.0) (2020-08-16)
### ⚠ BREAKING CHANGES
* tsc/ESM/Deno support (#82)
* modernize deps and build (#80)
### Build System
* modernize deps and build ([#80](https://www.github.com/yargs/cliui/issues/80)) ([339d08d](https://www.github.com/yargs/cliui/commit/339d08dc71b15a3928aeab09042af94db2f43743))
### Code Refactoring
* tsc/ESM/Deno support ([#82](https://www.github.com/yargs/cliui/issues/82)) ([4b777a5](https://www.github.com/yargs/cliui/commit/4b777a5fe01c5d8958c6708695d6aab7dbe5706c))
## [6.0.0](https://www.github.com/yargs/cliui/compare/v5.0.0...v6.0.0) (2019-11-10)
### ⚠ BREAKING CHANGES
* update deps, drop Node 6
### Code Refactoring
* update deps, drop Node 6 ([62056df](https://www.github.com/yargs/cliui/commit/62056df))
## [5.0.0](https://github.com/yargs/cliui/compare/v4.1.0...v5.0.0) (2019-04-10)
### Bug Fixes
* Update wrap-ansi to fix compatibility with latest versions of chalk. ([#60](https://github.com/yargs/cliui/issues/60)) ([7bf79ae](https://github.com/yargs/cliui/commit/7bf79ae))
### BREAKING CHANGES
* Drop support for node < 6.
<a name="4.1.0"></a>
## [4.1.0](https://github.com/yargs/cliui/compare/v4.0.0...v4.1.0) (2018-04-23)
### Features
* add resetOutput method ([#57](https://github.com/yargs/cliui/issues/57)) ([7246902](https://github.com/yargs/cliui/commit/7246902))
<a name="4.0.0"></a>
## [4.0.0](https://github.com/yargs/cliui/compare/v3.2.0...v4.0.0) (2017-12-18)
### Bug Fixes
* downgrades strip-ansi to version 3.0.1 ([#54](https://github.com/yargs/cliui/issues/54)) ([5764c46](https://github.com/yargs/cliui/commit/5764c46))
* set env variable FORCE_COLOR. ([#56](https://github.com/yargs/cliui/issues/56)) ([7350e36](https://github.com/yargs/cliui/commit/7350e36))
### Chores
* drop support for node < 4 ([#53](https://github.com/yargs/cliui/issues/53)) ([b105376](https://github.com/yargs/cliui/commit/b105376))
### Features
* add fallback for window width ([#45](https://github.com/yargs/cliui/issues/45)) ([d064922](https://github.com/yargs/cliui/commit/d064922))
### BREAKING CHANGES
* officially drop support for Node < 4
<a name="3.2.0"></a>
## [3.2.0](https://github.com/yargs/cliui/compare/v3.1.2...v3.2.0) (2016-04-11)
### Bug Fixes
* reduces tarball size ([acc6c33](https://github.com/yargs/cliui/commit/acc6c33))
### Features
* adds standard-version for release management ([ff84e32](https://github.com/yargs/cliui/commit/ff84e32))

14
Jira_helper/node_modules/cliui/LICENSE.txt generated vendored Normal file
View File

@@ -0,0 +1,14 @@
Copyright (c) 2015, Contributors
Permission to use, copy, modify, and/or distribute this software
for any purpose with or without fee is hereby granted, provided
that the above copyright notice and this permission notice
appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE
LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES
OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

161
Jira_helper/node_modules/cliui/README.md generated vendored Normal file
View File

@@ -0,0 +1,161 @@
# cliui
![ci](https://github.com/yargs/cliui/workflows/ci/badge.svg)
[![NPM version](https://img.shields.io/npm/v/cliui.svg)](https://www.npmjs.com/package/cliui)
[![Conventional Commits](https://img.shields.io/badge/Conventional%20Commits-1.0.0-yellow.svg)](https://conventionalcommits.org)
![nycrc config on GitHub](https://img.shields.io/nycrc/yargs/cliui)
easily create complex multi-column command-line-interfaces.
## Example
```bash
npm i cliui@latest chalk@latest
```
```js
const ui = require('cliui')()
const {Chalk} = require('chalk');
const chalk = new Chalk();
ui.div('Usage: $0 [command] [options]')
ui.div({
text: 'Options:',
padding: [2, 0, 1, 0]
})
ui.div(
{
text: "-f, --file",
width: 20,
padding: [0, 4, 0, 4]
},
{
text: "the file to load." +
chalk.green("(if this description is long it wraps).")
,
width: 20
},
{
text: chalk.red("[required]"),
align: 'right'
}
)
console.log(ui.toString())
```
## Deno/ESM Support
As of `v7` `cliui` supports [Deno](https://github.com/denoland/deno) and
[ESM](https://nodejs.org/api/esm.html#esm_ecmascript_modules):
```typescript
import cliui from "cliui";
import chalk from "chalk";
// Deno: import cliui from "https://deno.land/x/cliui/deno.ts";
const ui = cliui({})
ui.div('Usage: $0 [command] [options]')
ui.div({
text: 'Options:',
padding: [2, 0, 1, 0]
})
ui.div(
{
text: "-f, --file",
width: 20,
padding: [0, 4, 0, 4]
},
{
text: "the file to load." +
chalk.green("(if this description is long it wraps).")
,
width: 20
},
{
text: chalk.red("[required]"),
align: 'right'
}
)
console.log(ui.toString())
```
<img width="500" src="screenshot.png">
## Layout DSL
cliui exposes a simple layout DSL:
If you create a single `ui.div`, passing a string rather than an
object:
* `\n`: characters will be interpreted as new rows.
* `\t`: characters will be interpreted as new columns.
* `\s`: characters will be interpreted as padding.
**as an example...**
```js
var ui = require('./')({
width: 60
})
ui.div(
'Usage: node ./bin/foo.js\n' +
' <regex>\t provide a regex\n' +
' <glob>\t provide a glob\t [required]'
)
console.log(ui.toString())
```
**will output:**
```shell
Usage: node ./bin/foo.js
<regex> provide a regex
<glob> provide a glob [required]
```
## Methods
```js
cliui = require('cliui')
```
### cliui({width: integer})
Specify the maximum width of the UI being generated.
If no width is provided, cliui will try to get the current window's width and use it, and if that doesn't work, width will be set to `80`.
### cliui({wrap: boolean})
Enable or disable the wrapping of text in a column.
### cliui.div(column, column, column)
Create a row with any number of columns, a column
can either be a string, or an object with the following
options:
* **text:** some text to place in the column.
* **width:** the width of a column.
* **align:** alignment, `right` or `center`.
* **padding:** `[top, right, bottom, left]`.
* **border:** should a border be placed around the div?
### cliui.span(column, column, column)
Similar to `div`, except the next row will be appended without
a new line being created.
### cliui.resetOutput()
Resets the UI elements of the current cliui instance, maintaining the values
set for `width` and `wrap`.

287
Jira_helper/node_modules/cliui/build/lib/index.js generated vendored Normal file
View File

@@ -0,0 +1,287 @@
'use strict';
// Dispatch table of alignment implementations; 'left' needs no handler
// (it is the default rendering behavior).
const align = {
    right: alignRight,
    center: alignCenter
};
// Indices into a column's `padding` array: [top, right, bottom, left]
// (CSS shorthand order).
const top = 0;
const right = 1;
const bottom = 2;
const left = 3;
// Column-based layout engine. Rows of columns are collected via div()/span()
// and rendered to a single string by toString(). Relies on the module-level
// `mixin` (stringWidth/stripAnsi/wrap helpers injected through cliui()).
export class UI {
    constructor(opts) {
        var _a;
        // Total rendering width; wrap defaults to true when not provided.
        this.width = opts.width;
        this.wrap = (_a = opts.wrap) !== null && _a !== void 0 ? _a : true;
        this.rows = [];
    }
    // Like div(), but the next row will be appended on the same line.
    span(...args) {
        const cols = this.div(...args);
        cols.span = true;
    }
    // Drop all accumulated rows, keeping width/wrap settings.
    resetOutput() {
        this.rows = [];
    }
    // Add a row. Accepts strings and/or column-option objects; a single
    // string containing \t or \n is interpreted via the layout DSL.
    div(...args) {
        if (args.length === 0) {
            this.div('');
        }
        if (this.wrap && this.shouldApplyLayoutDSL(...args) && typeof args[0] === 'string') {
            return this.applyLayoutDSL(args[0]);
        }
        const cols = args.map(arg => {
            if (typeof arg === 'string') {
                return this.colFromString(arg);
            }
            return arg;
        });
        this.rows.push(cols);
        return cols;
    }
    // The DSL only applies to a lone string containing tabs or newlines.
    shouldApplyLayoutDSL(...args) {
        return args.length === 1 && typeof args[0] === 'string' &&
            /[\t\n]/.test(args[0]);
    }
    // Interpret \n as rows and \t as column separators, sizing the first
    // column so the second column lines up.
    applyLayoutDSL(str) {
        const rows = str.split('\n').map(row => row.split('\t'));
        let leftColumnWidth = 0;
        // simple heuristic for layout, make sure the
        // second column lines up along the left-hand.
        // don't allow the first column to take up more
        // than 50% of the screen.
        rows.forEach(columns => {
            if (columns.length > 1 && mixin.stringWidth(columns[0]) > leftColumnWidth) {
                leftColumnWidth = Math.min(Math.floor(this.width * 0.5), mixin.stringWidth(columns[0]));
            }
        });
        // generate a table:
        // replacing ' ' with padding calculations.
        // using the algorithmically generated width.
        rows.forEach(columns => {
            this.div(...columns.map((r, i) => {
                return {
                    text: r.trim(),
                    padding: this.measurePadding(r),
                    width: (i === 0 && columns.length > 1) ? leftColumnWidth : undefined
                };
            }));
        });
        return this.rows[this.rows.length - 1];
    }
    // Wrap a bare string into a column object, deriving left/right padding
    // from its leading/trailing whitespace.
    colFromString(text) {
        return {
            text,
            padding: this.measurePadding(text)
        };
    }
    measurePadding(str) {
        // measure padding without ansi escape codes
        const noAnsi = mixin.stripAnsi(str);
        return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length];
    }
    // Render all rows to the final output string.
    toString() {
        const lines = [];
        this.rows.forEach(row => {
            this.rowToString(row, lines);
        });
        // don't display any lines with the
        // hidden flag set.
        return lines
            .filter(line => !line.hidden)
            .map(line => line.text)
            .join('\n');
    }
    // Render one logical row (possibly multiple physical lines after
    // wrapping) into the `lines` accumulator.
    rowToString(row, lines) {
        this.rasterize(row).forEach((rrow, r) => {
            let str = '';
            rrow.forEach((col, c) => {
                const { width } = row[c]; // the width with padding.
                const wrapWidth = this.negatePadding(row[c]); // the width without padding.
                let ts = col; // temporary string used during alignment/padding.
                if (wrapWidth > mixin.stringWidth(col)) {
                    ts += ' '.repeat(wrapWidth - mixin.stringWidth(col));
                }
                // align the string within its column.
                if (row[c].align && row[c].align !== 'left' && this.wrap) {
                    const fn = align[row[c].align];
                    ts = fn(ts, wrapWidth);
                    if (mixin.stringWidth(ts) < wrapWidth) {
                        ts += ' '.repeat((width || 0) - mixin.stringWidth(ts) - 1);
                    }
                }
                // apply border and padding to string.
                const padding = row[c].padding || [0, 0, 0, 0];
                if (padding[left]) {
                    str += ' '.repeat(padding[left]);
                }
                str += addBorder(row[c], ts, '| ');
                str += ts;
                str += addBorder(row[c], ts, ' |');
                if (padding[right]) {
                    str += ' '.repeat(padding[right]);
                }
                // if prior row is span, try to render the
                // current row on the prior line.
                if (r === 0 && lines.length > 0) {
                    str = this.renderInline(str, lines[lines.length - 1]);
                }
            });
            // remove trailing whitespace.
            lines.push({
                text: str.replace(/ +$/, ''),
                span: row.span
            });
        });
        return lines;
    }
    // if the full 'source' can render in
    // the target line, do so.
    renderInline(source, previousLine) {
        const match = source.match(/^ */);
        const leadingWhitespace = match ? match[0].length : 0;
        const target = previousLine.text;
        const targetTextWidth = mixin.stringWidth(target.trimRight());
        if (!previousLine.span) {
            return source;
        }
        // if we're not applying wrapping logic,
        // just always append to the span.
        if (!this.wrap) {
            previousLine.hidden = true;
            return target + source;
        }
        if (leadingWhitespace < targetTextWidth) {
            return source;
        }
        previousLine.hidden = true;
        return target.trimRight() + ' '.repeat(leadingWhitespace - targetTextWidth) + source.trimLeft();
    }
    // Turn a row of columns into a grid of physical-line fragments:
    // rrows[r][c] is the r-th wrapped line of column c.
    rasterize(row) {
        const rrows = [];
        const widths = this.columnWidths(row);
        let wrapped;
        // word wrap all columns, and create
        // a data-structure that is easy to rasterize.
        row.forEach((col, c) => {
            // leave room for left and right padding.
            col.width = widths[c];
            if (this.wrap) {
                wrapped = mixin.wrap(col.text, this.negatePadding(col), { hard: true }).split('\n');
            }
            else {
                wrapped = col.text.split('\n');
            }
            if (col.border) {
                wrapped.unshift('.' + '-'.repeat(this.negatePadding(col) + 2) + '.');
                wrapped.push("'" + '-'.repeat(this.negatePadding(col) + 2) + "'");
            }
            // add top and bottom padding.
            if (col.padding) {
                wrapped.unshift(...new Array(col.padding[top] || 0).fill(''));
                wrapped.push(...new Array(col.padding[bottom] || 0).fill(''));
            }
            wrapped.forEach((str, r) => {
                if (!rrows[r]) {
                    rrows.push([]);
                }
                const rrow = rrows[r];
                // Pad out earlier columns that produced fewer lines.
                for (let i = 0; i < c; i++) {
                    if (rrow[i] === undefined) {
                        rrow.push('');
                    }
                }
                rrow.push(str);
            });
        });
        return rrows;
    }
    // Usable text width of a column once padding and border are removed.
    negatePadding(col) {
        let wrapWidth = col.width || 0;
        if (col.padding) {
            wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0);
        }
        if (col.border) {
            wrapWidth -= 4;
        }
        return wrapWidth;
    }
    // Resolve the final width of every column: explicit widths are honored,
    // remaining space is split evenly among unset columns.
    columnWidths(row) {
        if (!this.wrap) {
            return row.map(col => {
                return col.width || mixin.stringWidth(col.text);
            });
        }
        let unset = row.length;
        let remainingWidth = this.width;
        // column widths can be set in config.
        const widths = row.map(col => {
            if (col.width) {
                unset--;
                remainingWidth -= col.width;
                return col.width;
            }
            return undefined;
        });
        // any unset widths should be calculated.
        const unsetWidth = unset ? Math.floor(remainingWidth / unset) : 0;
        return widths.map((w, i) => {
            if (w === undefined) {
                return Math.max(unsetWidth, _minWidth(row[i]));
            }
            return w;
        });
    }
}
// Return the border decoration to emit beside one rendered cell fragment:
// the given `style` ('| ' or ' |') for normal content, a single space for
// blank padding lines, and nothing at all when the fragment is itself a
// horizontal border line (.----. / '----') or borders are disabled.
function addBorder(col, ts, style) {
    if (!col.border) {
        return '';
    }
    // Horizontal border rows get no side decoration.
    if (/[.']-+[.']/.test(ts)) {
        return '';
    }
    return ts.trim().length === 0 ? ' ' : style;
}
// Minimum width a column can shrink to: one character of text plus its
// left/right padding, plus 4 extra columns when a border is drawn
// ('| ' and ' |' on either side).
function _minWidth(col) {
    const padding = col.padding || [];
    const base = 1 + (padding[left] || 0) + (padding[right] || 0);
    return col.border ? base + 4 : base;
}
// Best-effort terminal width: use the current stdout column count when one
// is available, otherwise fall back to the conventional 80 columns.
function getWindowWidth() {
    /* c8 ignore next 4: depends on terminal */
    const columns = (typeof process === 'object' && process.stdout)
        ? process.stdout.columns
        : undefined;
    return columns || 80;
}
// Right-align `str` within `width` visible columns by left-padding with
// spaces; strings already at or beyond the width are returned trimmed.
function alignRight(str, width) {
    const trimmed = str.trim();
    const visible = mixin.stringWidth(trimmed);
    return visible < width
        ? ' '.repeat(width - visible) + trimmed
        : trimmed;
}
// Center `str` within `width` visible columns; padding is added only on the
// left (integer half of the slack), matching the original floor behavior.
function alignCenter(str, width) {
    const trimmed = str.trim();
    const visible = mixin.stringWidth(trimmed);
    /* c8 ignore next 3 */
    if (visible >= width) {
        return trimmed;
    }
    return ' '.repeat((width - visible) >> 1) + trimmed;
}
// Helper implementations (stringWidth/stripAnsi/wrap) injected through
// cliui(); module-level so every UI method can reach them.
let mixin;
// Factory: store the injected helpers and build a UI sized either by the
// caller's width or by the detected terminal width.
export function cliui(opts, _mixin) {
    mixin = _mixin;
    const width = (opts === null || opts === undefined ? undefined : opts.width) || getWindowWidth();
    const wrap = opts === null || opts === undefined ? undefined : opts.wrap;
    return new UI({ width, wrap });
}

File diff suppressed because one or more lines are too long

15
Jira_helper/node_modules/cliui/index.mjs generated vendored Normal file
View File

@@ -0,0 +1,15 @@
// Bootstrap cliui with CommonJS dependencies:
import { cliui } from './build/lib/index.js'
import stringWidth from 'string-width'
import stripAnsi from 'strip-ansi'
import wrapAnsi from 'wrap-ansi'
// Public entry point: construct a UI with the standard helper mixin
// (string measurement, ANSI stripping and word wrapping).
export default function ui (opts) {
  return cliui(opts, {
    stringWidth,
    stripAnsi,
    wrap: wrapAnsi
  })
}
// String-named export so require('cliui') resolves to the function itself
// under Node's CJS/ESM interop.
export {ui as 'module.exports'};

72
Jira_helper/node_modules/cliui/package.json generated vendored Normal file
View File

@@ -0,0 +1,72 @@
{
"name": "cliui",
"version": "9.0.1",
"description": "easily create complex multi-column command-line-interfaces",
"main": "build/index.mjs",
"exports": {
".": "./index.mjs"
},
"type": "module",
"module": "./index.mjs",
"scripts": {
"check": "standardx '**/*.ts' && standardx '**/*.js'",
"fix": "standardx --fix '**/*.ts' && standardx --fix '**/*.js'",
"pretest": "rimraf build && tsc -p tsconfig.test.json",
"test": "c8 mocha ./test/*.mjs",
"postest": "check",
"coverage": "c8 report --check-coverage",
"precompile": "rimraf build",
"compile": "tsc",
"prepare": "npm run compile"
},
"repository": "yargs/cliui",
"standard": {
"ignore": [
"**/example/**"
],
"globals": [
"it"
]
},
"keywords": [
"cli",
"command-line",
"layout",
"design",
"console",
"wrap",
"table"
],
"author": "Ben Coe <ben@npmjs.com>",
"license": "ISC",
"dependencies": {
"string-width": "^7.2.0",
"strip-ansi": "^7.1.0",
"wrap-ansi": "^9.0.0"
},
"devDependencies": {
"@types/node": "^22.13.10",
"@typescript-eslint/eslint-plugin": "^4.0.0",
"@typescript-eslint/parser": "^4.0.0",
"c8": "^10.1.3",
"chai": "^5.2.0",
"chalk": "^5.4.1",
"cross-env": "^7.0.2",
"eslint": "^7.6.0",
"eslint-plugin-import": "^2.22.0",
"eslint-plugin-n": "^14.0.0",
"gts": "^6.0.2",
"mocha": "^11.1.0",
"rimraf": "^6.0.1",
"standardx": "^7.0.0",
"typescript": "^5.8.2"
},
"files": [
"build",
"index.mjs",
"!*.d.ts"
],
"engines": {
"node": ">=20"
}
}

21
Jira_helper/node_modules/csv-parse/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2010 Adaltas
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

82
Jira_helper/node_modules/csv-parse/README.md generated vendored Normal file
View File

@@ -0,0 +1,82 @@
# CSV parser for Node.js and the web
[![Build Status](https://img.shields.io/github/actions/workflow/status/adaltas/node-csv/nodejs.yml?branch=master)](https://github.com/adaltas/node-csv/actions)
[![NPM](https://img.shields.io/npm/dm/csv-parse)](https://www.npmjs.com/package/csv-parse)
[![NPM](https://img.shields.io/npm/v/csv-parse)](https://www.npmjs.com/package/csv-parse)
The [`csv-parse` package](https://csv.js.org/parse/) is a parser converting CSV text input into arrays or objects. It is part of the [CSV project](https://csv.js.org/).
It implements the Node.js [`stream.Transform` API](http://nodejs.org/api/stream.html#stream_class_stream_transform). It also provides a simple callback-based API for convenience. It is both extremely easy to use and powerful. It was first released in 2010 and is used against big data sets by a large community.
## Documentation
- [Project homepage](https://csv.js.org/parse/)
- [API](https://csv.js.org/parse/api/)
- [Options](https://csv.js.org/parse/options/)
- [Info properties](https://csv.js.org/parse/info/)
- [Common errors](https://csv.js.org/parse/errors/)
- [Examples](https://csv.js.org/project/examples/)
## Main features
- Flexible with lots of [options](https://csv.js.org/parse/options/)
- Multiple [distributions](https://csv.js.org/parse/distributions/): Node.js, Web, ECMAScript modules and CommonJS
- Follow the Node.js streaming API
- Simplicity with the optional callback API
- Support delimiters, quotes, escape characters and comments
- Line breaks discovery
- Support big datasets
- Complete test coverage and lot of samples for inspiration
- No external dependencies
- Work nicely with the [csv-generate](https://csv.js.org/generate/), [stream-transform](https://csv.js.org/transform/) and [csv-stringify](https://csv.js.org/stringify/) packages
- MIT License
## Usage
Run `npm install csv` to install the full CSV module or run `npm install csv-parse` if you are only interested in the CSV parser.
Use the callback and sync APIs for simplicity or the stream based API for scalability.
## Example
The [API](https://csv.js.org/parse/api/) is available in multiple flavors. This example illustrates the stream API.
```js
import assert from "assert";
import { parse } from "csv-parse";
const records = [];
// Initialize the parser
const parser = parse({
delimiter: ":",
});
// Use the readable stream api to consume records
parser.on("readable", function () {
let record;
while ((record = parser.read()) !== null) {
records.push(record);
}
});
// Catch any error
parser.on("error", function (err) {
console.error(err.message);
});
// Test that the parsed records matched the expected records
parser.on("end", function () {
assert.deepStrictEqual(records, [
["root", "x", "0", "0", "root", "/root", "/bin/bash"],
["someone", "x", "1022", "1022", "", "/home/someone", "/bin/bash"],
]);
});
// Write data to the stream
parser.write("root:x:0:0:root:/root:/bin/bash\n");
parser.write("someone:x:1022:1022::/home/someone:/bin/bash\n");
// Close the readable stream
parser.end();
```
## Contributors
The project is sponsored by [Adaltas](https://www.adaltas.com), a Big Data consulting firm based in Paris, France.
- David Worms: <https://github.com/wdavidw>

1898
Jira_helper/node_modules/csv-parse/dist/cjs/index.cjs generated vendored Normal file

File diff suppressed because it is too large Load Diff

507
Jira_helper/node_modules/csv-parse/dist/cjs/index.d.cts generated vendored Normal file
View File

@@ -0,0 +1,507 @@
// Original definitions in https://github.com/DefinitelyTyped/DefinitelyTyped by: David Muller <https://github.com/davidm77>
/// <reference types="node" />
import * as stream from "stream";
/**
 * Completion callback for the callback flavor of the `parse` API:
 * receives an error or the full array of parsed records, plus an
 * optional `Info` snapshot of parsing statistics.
 */
export type Callback<T = string[]> = (
  err: CsvError | undefined,
  records: T[],
  info?: Info,
) => void;
// export interface Parser extends stream.Transform {}
// export class Parser<T> extends stream.Transform {
/**
 * Streaming CSV parser: a Node.js `stream.Transform` consuming text or
 * buffers and emitting parsed records.
 */
export class Parser extends stream.Transform {
  constructor(options: Options);
  // __push(line: T): CsvError | undefined;
  // Internal: push one parsed record downstream.
  __push(line: any): CsvError | undefined;
  // __write(chars: any, end: any, callback: any): any;
  /** Normalized options the parser was constructed with. */
  readonly options: OptionsNormalized;
  /** Live parsing statistics. */
  readonly info: Info;
}
/**
 * Parsing statistics exposed on `Parser.info` and passed to callbacks.
 */
export interface Info {
  /**
   * Count the number of lines being fully commented.
   */
  readonly comment_lines: number;
  /**
   * Count the number of processed empty lines.
   */
  readonly empty_lines: number;
  /**
   * The number of lines encountered in the source dataset, start at 1 for the first line.
   */
  readonly lines: number;
  /**
   * Count the number of processed records.
   */
  readonly records: number;
  /**
   * Count of the number of processed bytes.
   */
  readonly bytes: number;
  /**
   * Number of non uniform records when `relax_column_count` is true.
   */
  readonly invalid_field_length: number;
  /**
   * Normalized version of `options.columns` when `options.columns` is true, boolean otherwise.
   */
  readonly columns: boolean | { name: string }[] | { disabled: true }[];
}
/**
 * Context object passed to casting functions and `on_record`, describing
 * the position and parser state for the value being processed.
 */
export interface CastingContext {
  readonly column: number | string;
  readonly empty_lines: number;
  readonly error: CsvError;
  readonly header: boolean;
  readonly index: number;
  readonly quoting: boolean;
  readonly lines: number;
  readonly raw: string | undefined;
  readonly records: number;
  readonly invalid_field_length: number;
}
/** User function converting a raw field value into an arbitrary value. */
export type CastingFunction = (
  value: string,
  context: CastingContext,
) => unknown;
/** User function converting a raw field value into a `Date`. */
export type CastingDateFunction = (
  value: string,
  context: CastingContext,
) => Date;
/**
 * One column definition: a name, an object carrying a `name` property,
 * or a nullish/false value to skip the column.
 */
export type ColumnOption<K = string> =
  | K
  | undefined
  | null
  | false
  | { name: K };
/**
 * Parser options after normalization: defaults applied, camelCase aliases
 * resolved, and character options converted to Buffers. Exposed read-only
 * on `Parser.options`.
 */
export interface OptionsNormalized<T = string[]> {
  auto_parse?: boolean | CastingFunction;
  auto_parse_date?: boolean | CastingDateFunction;
  /**
   * If true, detect and exclude the byte order mark (BOM) from the CSV input if present.
   */
  bom?: boolean;
  /**
   * If true, the parser will attempt to convert input string to native types.
   * If a function, receive the value as first argument, a context as second argument and return a new value. More information about the context properties is available below.
   */
  cast?: boolean | CastingFunction;
  /**
   * If true, the parser will attempt to convert input string to dates.
   * If a function, receive the value as argument and return a new value. It requires the "auto_parse" option. Be careful, it relies on Date.parse.
   */
  cast_date?: boolean | CastingDateFunction;
  /**
   * Internal option: user function used to cast the first record into the
   * header/column definitions (set when `columns` is a function).
   */
  cast_first_line_to_header?: (
    record: T,
  ) => ColumnOption<
    T extends string[] ? string : T extends unknown ? string : keyof T
  >[];
  /**
   * List of fields as an array, a user defined callback accepting the first
   * line and returning the column names or true if autodiscovered in the first
   * CSV line, default to null, affect the result data set in the sense that
   * records will be objects instead of arrays.
   */
  columns:
    | boolean
    | ColumnOption<
        T extends string[] ? string : T extends unknown ? string : keyof T
      >[];
  /**
   * Convert values into an array of values when columns are activated and
   * when multiple columns of the same name are found.
   */
  group_columns_by_name: boolean;
  /**
   * Treat all the characters after this one as a comment, default to '' (disabled).
   */
  comment: string | null;
  /**
   * Restrict the definition of comments to a full line. Comment characters
   * defined in the middle of the line are not interpreted as such. The
   * option require the activation of comments.
   */
  comment_no_infix: boolean;
  /**
   * Set the field delimiter. One character only, defaults to comma.
   */
  delimiter: Buffer[];
  /**
   * Set the source and destination encoding, a value of `null` returns buffer instead of strings.
   */
  encoding: BufferEncoding | null;
  /**
   * Set the escape character, one character only, defaults to double quotes.
   */
  escape: null | Buffer;
  /**
   * Start handling records from the requested number of records.
   */
  from: number;
  /**
   * Start handling records from the requested line number.
   */
  from_line: number;
  /**
   * Don't interpret delimiters as such in the last field according to the number of fields calculated from the number of columns, the option require the presence of the `column` option when `true`.
   */
  ignore_last_delimiters: boolean | number;
  /**
   * Generate two properties `info` and `record` where `info` is a snapshot of the info object at the time the record was created and `record` is the parsed array or object.
   */
  info: boolean;
  /**
   * If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  ltrim: boolean;
  /**
   * Maximum number of characters to be contained in the field and line buffers before an exception is raised,
   * used to guard against a wrong delimiter or record_delimiter,
   * default to 128000 characters.
   */
  max_record_size: number;
  /**
   * Name of header-record title to name objects by.
   */
  objname: number | string | undefined;
  /**
   * Alter and filter records by executing a user defined function.
   */
  on_record?: (record: T, context: CastingContext) => T | undefined;
  /**
   * Optional character surrounding a field, one character only, defaults to double quotes.
   */
  quote?: Buffer | null;
  /**
   * Generate two properties raw and row where raw is the original CSV row content and row is the parsed array or object.
   */
  raw: boolean;
  /**
   * Discard inconsistent columns count, default to false.
   */
  relax_column_count: boolean;
  /**
   * Discard inconsistent columns count when the record contains less fields than expected, default to false.
   */
  relax_column_count_less: boolean;
  /**
   * Discard inconsistent columns count when the record contains more fields than expected, default to false.
   */
  relax_column_count_more: boolean;
  /**
   * Preserve quotes inside unquoted field.
   */
  relax_quotes: boolean;
  /**
   * One or multiple characters used to delimit record rows; defaults to auto discovery if not provided.
   * Supported auto discovery method are Linux ("\n"), Apple ("\r") and Windows ("\r\n") row delimiters.
   */
  record_delimiter: Buffer[];
  /**
   * If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  rtrim: boolean;
  /**
   * Don't generate empty values for empty lines.
   * Defaults to false
   */
  skip_empty_lines: boolean;
  /**
   * Skip a line with error found inside and directly go process the next line.
   */
  skip_records_with_error: boolean;
  /**
   * Don't generate records for lines containing empty column values (columns matching /\s*/), defaults to false.
   */
  skip_records_with_empty_values: boolean;
  /**
   * Stop handling records after the requested number of records.
   */
  to: number;
  /**
   * Stop handling records after the requested line number.
   */
  to_line: number;
  /**
   * If true, ignore whitespace immediately around the delimiter, defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  trim: boolean;
}
/*
  Note, could not `extends stream.TransformOptions` because encoding can be
  BufferEncoding and undefined as well as null which is not defined in the
  extended type.
*/
/**
 * User-facing options accepted by `parse` and the `Parser` constructor.
 * Most options exist in both snake_case and camelCase variants.
 */
export interface Options<T = string[]> {
  /**
   * If true, the parser will attempt to convert read data types to native types.
   * @deprecated Use {@link cast}
   */
  auto_parse?: boolean | CastingFunction;
  autoParse?: boolean | CastingFunction;
  /**
   * If true, the parser will attempt to convert read data types to dates. It requires the "auto_parse" option.
   * @deprecated Use {@link cast_date}
   */
  auto_parse_date?: boolean | CastingDateFunction;
  autoParseDate?: boolean | CastingDateFunction;
  /**
   * If true, detect and exclude the byte order mark (BOM) from the CSV input if present.
   */
  bom?: boolean;
  /**
   * If true, the parser will attempt to convert input string to native types.
   * If a function, receive the value as first argument, a context as second argument and return a new value. More information about the context properties is available below.
   */
  cast?: boolean | CastingFunction;
  /**
   * If true, the parser will attempt to convert input string to dates.
   * If a function, receive the value as argument and return a new value. It requires the "auto_parse" option. Be careful, it relies on Date.parse.
   */
  cast_date?: boolean | CastingDateFunction;
  castDate?: boolean | CastingDateFunction;
  /**
   * List of fields as an array,
   * a user defined callback accepting the first line and returning the column names or true if autodiscovered in the first CSV line,
   * default to null,
   * affect the result data set in the sense that records will be objects instead of arrays.
   */
  columns?:
    | boolean
    | ColumnOption<
        T extends string[] ? string : T extends unknown ? string : keyof T
      >[]
    | ((
        record: T,
      ) => ColumnOption<
        T extends string[] ? string : T extends unknown ? string : keyof T
      >[]);
  /**
   * Convert values into an array of values when columns are activated and
   * when multiple columns of the same name are found.
   */
  group_columns_by_name?: boolean;
  groupColumnsByName?: boolean;
  /**
   * Treat all the characters after this one as a comment, default to '' (disabled).
   */
  comment?: string | boolean | null;
  /**
   * Restrict the definition of comments to a full line. Comment characters
   * defined in the middle of the line are not interpreted as such. The
   * option require the activation of comments.
   */
  comment_no_infix?: boolean | null;
  /**
   * Set the field delimiter. One character only, defaults to comma.
   */
  delimiter?: string | string[] | Buffer;
  /**
   * Set the source and destination encoding, a value of `null` returns buffer instead of strings.
   */
  encoding?: BufferEncoding | boolean | null | undefined;
  /**
   * Set the escape character, one character only, defaults to double quotes.
   */
  escape?: string | null | boolean | Buffer;
  /**
   * Start handling records from the requested number of records.
   */
  from?: number | string;
  /**
   * Start handling records from the requested line number.
   */
  from_line?: null | number | string;
  fromLine?: null | number | string;
  /**
   * Don't interpret delimiters as such in the last field according to the number of fields calculated from the number of columns, the option require the presence of the `column` option when `true`.
   */
  ignore_last_delimiters?: boolean | number;
  /**
   * Generate two properties `info` and `record` where `info` is a snapshot of the info object at the time the record was created and `record` is the parsed array or object.
   */
  info?: boolean;
  /**
   * If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  ltrim?: boolean | null;
  /**
   * Maximum number of characters to be contained in the field and line buffers before an exception is raised,
   * used to guard against a wrong delimiter or record_delimiter,
   * default to 128000 characters.
   */
  max_record_size?: number | null | string;
  maxRecordSize?: number;
  /**
   * Name of header-record title to name objects by.
   */
  objname?: Buffer | null | number | string;
  /**
   * Alter and filter records by executing a user defined function.
   */
  on_record?: (record: T, context: CastingContext) => T | null | undefined;
  onRecord?: (record: T, context: CastingContext) => T | null | undefined;
  /**
   * Function called when an error occurred if the `skip_records_with_error`
   * option is activated.
   */
  on_skip?: (err: CsvError | undefined, raw: string | undefined) => undefined;
  onSkip?: (err: CsvError | undefined, raw: string | undefined) => undefined;
  /**
   * Optional character surrounding a field, one character only, defaults to double quotes.
   */
  quote?: string | boolean | Buffer | null;
  /**
   * Generate two properties raw and row where raw is the original CSV row content and row is the parsed array or object.
   */
  raw?: boolean | null;
  /**
   * One or multiple characters used to delimit record rows; defaults to auto discovery if not provided.
   * Supported auto discovery method are Linux ("\n"), Apple ("\r") and Windows ("\r\n") row delimiters.
   */
  record_delimiter?: string | Buffer | null | (string | Buffer | null)[];
  recordDelimiter?: string | Buffer | null | (string | Buffer | null)[];
  /**
   * Discard inconsistent columns count, default to false.
   */
  relax_column_count?: boolean | null;
  relaxColumnCount?: boolean | null;
  /**
   * Discard inconsistent columns count when the record contains less fields than expected, default to false.
   */
  relax_column_count_less?: boolean | null;
  relaxColumnCountLess?: boolean | null;
  /**
   * Discard inconsistent columns count when the record contains more fields than expected, default to false.
   */
  relax_column_count_more?: boolean | null;
  relaxColumnCountMore?: boolean | null;
  /**
   * Preserve quotes inside unquoted field.
   */
  relax_quotes?: boolean | null;
  relaxQuotes?: boolean | null;
  /**
   * If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  rtrim?: boolean | null;
  /**
   * Don't generate empty values for empty lines.
   * Defaults to false
   */
  skip_empty_lines?: boolean | null;
  skipEmptyLines?: boolean | null;
  /**
   * Don't generate records for lines containing empty column values (columns matching /\s*/), defaults to false.
   */
  skip_records_with_empty_values?: boolean | null;
  skipRecordsWithEmptyValues?: boolean | null;
  /**
   * Skip a line with error found inside and directly go process the next line.
   */
  skip_records_with_error?: boolean | null;
  skipRecordsWithError?: boolean | null;
  /**
   * Stop handling records after the requested number of records.
   */
  to?: null | number | string;
  /**
   * Stop handling records after the requested line number.
   */
  to_line?: null | number | string;
  toLine?: null | number | string;
  /**
   * If true, ignore whitespace immediately around the delimiter, defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  trim?: boolean | null;
}
/** Machine-readable error codes attached to `CsvError.code`. */
export type CsvErrorCode =
  | "CSV_INVALID_ARGUMENT"
  | "CSV_INVALID_CLOSING_QUOTE"
  | "CSV_INVALID_COLUMN_DEFINITION"
  | "CSV_INVALID_COLUMN_MAPPING"
  | "CSV_INVALID_OPTION_BOM"
  | "CSV_INVALID_OPTION_CAST"
  | "CSV_INVALID_OPTION_CAST_DATE"
  | "CSV_INVALID_OPTION_COLUMNS"
  | "CSV_INVALID_OPTION_COMMENT"
  | "CSV_INVALID_OPTION_DELIMITER"
  | "CSV_INVALID_OPTION_GROUP_COLUMNS_BY_NAME"
  | "CSV_INVALID_OPTION_ON_RECORD"
  | "CSV_MAX_RECORD_SIZE"
  | "CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE"
  | "CSV_OPTION_COLUMNS_MISSING_NAME"
  | "CSV_QUOTE_NOT_CLOSED"
  | "CSV_RECORD_INCONSISTENT_FIELDS_LENGTH"
  | "CSV_RECORD_INCONSISTENT_COLUMNS"
  | "CSV_UNKNOWN_ERROR"
  | "INVALID_OPENING_QUOTE";
/**
 * Error type emitted by the parser. Carries a machine-readable `code`
 * plus arbitrary extra context properties (hence the index signature).
 */
export class CsvError extends Error {
  readonly code: CsvErrorCode;
  [key: string]: any;
  constructor(
    code: CsvErrorCode,
    message: string | string[],
    options?: OptionsNormalized,
    ...contexts: unknown[]
  );
}
/**
 * Options variant that requires `columns` to be set (and not false),
 * used to type parsed records as objects rather than string arrays.
 */
type OptionsWithColumns<T> = Omit<Options<T>, "columns"> & {
  columns: Exclude<Options["columns"], undefined | false>;
};
/**
 * Main entry point of the stream/callback API. The overloads cover every
 * supported calling convention (input and/or options and/or callback);
 * each returns the underlying `Parser` transform stream.
 */
declare function parse<T = unknown>(
  input: string | Buffer | Uint8Array,
  options: OptionsWithColumns<T>,
  callback?: Callback<T>,
): Parser;
declare function parse(
  input: string | Buffer | Uint8Array,
  options: Options,
  callback?: Callback,
): Parser;
declare function parse<T = unknown>(
  options: OptionsWithColumns<T>,
  callback?: Callback<T>,
): Parser;
declare function parse(options: Options, callback?: Callback): Parser;
declare function parse(
  input: string | Buffer | Uint8Array,
  callback?: Callback,
): Parser;
declare function parse(callback?: Callback): Parser;
// export default parse;
export { parse };
/** Apply defaults and resolve camelCase aliases into normalized options. */
declare function normalize_options(opts: Options): OptionsNormalized;
export { normalize_options };

1786
Jira_helper/node_modules/csv-parse/dist/cjs/sync.cjs generated vendored Normal file

File diff suppressed because it is too large Load Diff

30
Jira_helper/node_modules/csv-parse/dist/cjs/sync.d.cts generated vendored Normal file
View File

@@ -0,0 +1,30 @@
import { Options } from "./index.cjs";
type OptionsWithColumns<T> = Omit<Options<T>, "columns"> & {
columns: Exclude<Options["columns"], undefined | false>;
};
declare function parse<T = unknown>(
input: Buffer | string | Uint8Array,
options: OptionsWithColumns<T>,
): T[];
declare function parse(
input: Buffer | string | Uint8Array,
options: Options,
): string[][];
declare function parse(input: Buffer | string | Uint8Array): string[][];
// export default parse;
export { parse };
export {
CastingContext,
CastingFunction,
CastingDateFunction,
ColumnOption,
Options,
OptionsNormalized,
Info,
CsvErrorCode,
CsvError,
} from "./index.cjs";

507
Jira_helper/node_modules/csv-parse/dist/esm/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,507 @@
// Original definitions in https://github.com/DefinitelyTyped/DefinitelyTyped by: David Muller <https://github.com/davidm77>
/// <reference types="node" />
import * as stream from "stream";
/**
 * Completion callback for the callback flavor of the `parse` API:
 * receives an error or the full array of parsed records, plus an
 * optional `Info` snapshot of parsing statistics.
 */
export type Callback<T = string[]> = (
  err: CsvError | undefined,
  records: T[],
  info?: Info,
) => void;
// export interface Parser extends stream.Transform {}
// export class Parser<T> extends stream.Transform {
/**
 * Streaming CSV parser: a Node.js `stream.Transform` consuming text or
 * buffers and emitting parsed records.
 */
export class Parser extends stream.Transform {
  constructor(options: Options);
  // __push(line: T): CsvError | undefined;
  // Internal: push one parsed record downstream.
  __push(line: any): CsvError | undefined;
  // __write(chars: any, end: any, callback: any): any;
  /** Normalized options the parser was constructed with. */
  readonly options: OptionsNormalized;
  /** Live parsing statistics. */
  readonly info: Info;
}
/**
 * Parsing statistics exposed on `Parser.info` and passed to callbacks.
 */
export interface Info {
  /**
   * Count the number of lines being fully commented.
   */
  readonly comment_lines: number;
  /**
   * Count the number of processed empty lines.
   */
  readonly empty_lines: number;
  /**
   * The number of lines encountered in the source dataset, start at 1 for the first line.
   */
  readonly lines: number;
  /**
   * Count the number of processed records.
   */
  readonly records: number;
  /**
   * Count of the number of processed bytes.
   */
  readonly bytes: number;
  /**
   * Number of non uniform records when `relax_column_count` is true.
   */
  readonly invalid_field_length: number;
  /**
   * Normalized version of `options.columns` when `options.columns` is true, boolean otherwise.
   */
  readonly columns: boolean | { name: string }[] | { disabled: true }[];
}
/**
 * Context object passed to casting functions and `on_record`, describing
 * the position and parser state for the value being processed.
 */
export interface CastingContext {
  readonly column: number | string;
  readonly empty_lines: number;
  readonly error: CsvError;
  readonly header: boolean;
  readonly index: number;
  readonly quoting: boolean;
  readonly lines: number;
  readonly raw: string | undefined;
  readonly records: number;
  readonly invalid_field_length: number;
}
/** User function converting a raw field value into an arbitrary value. */
export type CastingFunction = (
  value: string,
  context: CastingContext,
) => unknown;
/** User function converting a raw field value into a `Date`. */
export type CastingDateFunction = (
  value: string,
  context: CastingContext,
) => Date;
/**
 * One column definition: a name, an object carrying a `name` property,
 * or a nullish/false value to skip the column.
 */
export type ColumnOption<K = string> =
  | K
  | undefined
  | null
  | false
  | { name: K };
/**
 * Parser options after normalization: defaults applied, camelCase aliases
 * resolved, and character options converted to Buffers. Exposed read-only
 * on `Parser.options`.
 */
export interface OptionsNormalized<T = string[]> {
  auto_parse?: boolean | CastingFunction;
  auto_parse_date?: boolean | CastingDateFunction;
  /**
   * If true, detect and exclude the byte order mark (BOM) from the CSV input if present.
   */
  bom?: boolean;
  /**
   * If true, the parser will attempt to convert input string to native types.
   * If a function, receive the value as first argument, a context as second argument and return a new value. More information about the context properties is available below.
   */
  cast?: boolean | CastingFunction;
  /**
   * If true, the parser will attempt to convert input string to dates.
   * If a function, receive the value as argument and return a new value. It requires the "auto_parse" option. Be careful, it relies on Date.parse.
   */
  cast_date?: boolean | CastingDateFunction;
  /**
   * Internal option: user function used to cast the first record into the
   * header/column definitions (set when `columns` is a function).
   */
  cast_first_line_to_header?: (
    record: T,
  ) => ColumnOption<
    T extends string[] ? string : T extends unknown ? string : keyof T
  >[];
  /**
   * List of fields as an array, a user defined callback accepting the first
   * line and returning the column names or true if autodiscovered in the first
   * CSV line, default to null, affect the result data set in the sense that
   * records will be objects instead of arrays.
   */
  columns:
    | boolean
    | ColumnOption<
        T extends string[] ? string : T extends unknown ? string : keyof T
      >[];
  /**
   * Convert values into an array of values when columns are activated and
   * when multiple columns of the same name are found.
   */
  group_columns_by_name: boolean;
  /**
   * Treat all the characters after this one as a comment, default to '' (disabled).
   */
  comment: string | null;
  /**
   * Restrict the definition of comments to a full line. Comment characters
   * defined in the middle of the line are not interpreted as such. The
   * option require the activation of comments.
   */
  comment_no_infix: boolean;
  /**
   * Set the field delimiter. One character only, defaults to comma.
   */
  delimiter: Buffer[];
  /**
   * Set the source and destination encoding, a value of `null` returns buffer instead of strings.
   */
  encoding: BufferEncoding | null;
  /**
   * Set the escape character, one character only, defaults to double quotes.
   */
  escape: null | Buffer;
  /**
   * Start handling records from the requested number of records.
   */
  from: number;
  /**
   * Start handling records from the requested line number.
   */
  from_line: number;
  /**
   * Don't interpret delimiters as such in the last field according to the number of fields calculated from the number of columns, the option require the presence of the `column` option when `true`.
   */
  ignore_last_delimiters: boolean | number;
  /**
   * Generate two properties `info` and `record` where `info` is a snapshot of the info object at the time the record was created and `record` is the parsed array or object.
   */
  info: boolean;
  /**
   * If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  ltrim: boolean;
  /**
   * Maximum number of characters to be contained in the field and line buffers before an exception is raised,
   * used to guard against a wrong delimiter or record_delimiter,
   * default to 128000 characters.
   */
  max_record_size: number;
  /**
   * Name of header-record title to name objects by.
   */
  objname: number | string | undefined;
  /**
   * Alter and filter records by executing a user defined function.
   */
  on_record?: (record: T, context: CastingContext) => T | undefined;
  /**
   * Optional character surrounding a field, one character only, defaults to double quotes.
   */
  quote?: Buffer | null;
  /**
   * Generate two properties raw and row where raw is the original CSV row content and row is the parsed array or object.
   */
  raw: boolean;
  /**
   * Discard inconsistent columns count, default to false.
   */
  relax_column_count: boolean;
  /**
   * Discard inconsistent columns count when the record contains less fields than expected, default to false.
   */
  relax_column_count_less: boolean;
  /**
   * Discard inconsistent columns count when the record contains more fields than expected, default to false.
   */
  relax_column_count_more: boolean;
  /**
   * Preserve quotes inside unquoted field.
   */
  relax_quotes: boolean;
  /**
   * One or multiple characters used to delimit record rows; defaults to auto discovery if not provided.
   * Supported auto discovery method are Linux ("\n"), Apple ("\r") and Windows ("\r\n") row delimiters.
   */
  record_delimiter: Buffer[];
  /**
   * If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  rtrim: boolean;
  /**
   * Don't generate empty values for empty lines.
   * Defaults to false
   */
  skip_empty_lines: boolean;
  /**
   * Skip a line with error found inside and directly go process the next line.
   */
  skip_records_with_error: boolean;
  /**
   * Don't generate records for lines containing empty column values (columns matching /\s*/), defaults to false.
   */
  skip_records_with_empty_values: boolean;
  /**
   * Stop handling records after the requested number of records.
   */
  to: number;
  /**
   * Stop handling records after the requested line number.
   */
  to_line: number;
  /**
   * If true, ignore whitespace immediately around the delimiter, defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  trim: boolean;
}
/*
Note: this interface cannot `extends stream.TransformOptions` because
`encoding` may be a BufferEncoding, undefined, or null — and null is not
permitted by the extended type.
*/
/**
 * User-facing parser options. Every option is optional and accepts both
 * snake_case and camelCase spellings; `normalize_options` converts these
 * into an `OptionsNormalized` instance consumed by the parser core.
 */
export interface Options<T = string[]> {
  /**
   * If true, the parser will attempt to convert read data types to native types.
   * @deprecated Use {@link cast}
   */
  auto_parse?: boolean | CastingFunction;
  autoParse?: boolean | CastingFunction;
  /**
   * If true, the parser will attempt to convert read data types to dates. It requires the "auto_parse" option.
   * @deprecated Use {@link cast_date}
   */
  auto_parse_date?: boolean | CastingDateFunction;
  autoParseDate?: boolean | CastingDateFunction;
  /**
   * If true, detect and exclude the byte order mark (BOM) from the CSV input if present.
   */
  bom?: boolean;
  /**
   * If true, the parser will attempt to convert input string to native types.
   * If a function, receive the value as first argument, a context as second argument and return a new value. More information about the context properties is available below.
   */
  cast?: boolean | CastingFunction;
  /**
   * If true, the parser will attempt to convert input string to dates.
   * If a function, receive the value as argument and return a new value. It requires the "auto_parse" option. Be careful, it relies on Date.parse.
   */
  cast_date?: boolean | CastingDateFunction;
  castDate?: boolean | CastingDateFunction;
  /**
   * List of fields as an array,
   * a user defined callback accepting the first line and returning the column names or true if autodiscovered in the first CSV line,
   * default to null,
   * affect the result data set in the sense that records will be objects instead of arrays.
   */
  columns?:
    | boolean
    | ColumnOption<
        T extends string[] ? string : T extends unknown ? string : keyof T
      >[]
    | ((
        record: T,
      ) => ColumnOption<
        T extends string[] ? string : T extends unknown ? string : keyof T
      >[]);
  /**
   * Convert values into an array of values when columns are activated and
   * when multiple columns of the same name are found.
   */
  group_columns_by_name?: boolean;
  groupColumnsByName?: boolean;
  /**
   * Treat all the characters after this one as a comment, default to '' (disabled).
   */
  comment?: string | boolean | null;
  /**
   * Restrict the definition of comments to a full line. Comment characters
   * defined in the middle of the line are not interpreted as such. The
   * option requires the activation of comments.
   */
  comment_no_infix?: boolean | null;
  /**
   * Set the field delimiter. One character only, defaults to comma.
   */
  delimiter?: string | string[] | Buffer;
  /**
   * Set the source and destination encoding, a value of `null` returns buffer instead of strings.
   */
  encoding?: BufferEncoding | boolean | null | undefined;
  /**
   * Set the escape character, one character only, defaults to double quotes.
   */
  escape?: string | null | boolean | Buffer;
  /**
   * Start handling records from the requested number of records.
   */
  from?: number | string;
  /**
   * Start handling records from the requested line number.
   */
  from_line?: null | number | string;
  fromLine?: null | number | string;
  /**
   * Don't interpret delimiters as such in the last field according to the number of fields calculated from the number of columns, the option require the presence of the `column` option when `true`.
   */
  ignore_last_delimiters?: boolean | number;
  /**
   * Generate two properties `info` and `record` where `info` is a snapshot of the info object at the time the record was created and `record` is the parsed array or object.
   */
  info?: boolean;
  /**
   * If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  ltrim?: boolean | null;
  /**
   * Maximum number of characters to be contained in the field and line buffers before an exception is raised,
   * used to guard against a wrong delimiter or record_delimiter,
   * default to 128000 characters.
   */
  max_record_size?: number | null | string;
  maxRecordSize?: number;
  /**
   * Name of header-record title to name objects by.
   */
  objname?: Buffer | null | number | string;
  /**
   * Alter and filter records by executing a user defined function.
   */
  on_record?: (record: T, context: CastingContext) => T | null | undefined;
  onRecord?: (record: T, context: CastingContext) => T | null | undefined;
  /**
   * Function called when an error occurred if the `skip_records_with_error`
   * option is activated.
   */
  on_skip?: (err: CsvError | undefined, raw: string | undefined) => undefined;
  onSkip?: (err: CsvError | undefined, raw: string | undefined) => undefined;
  /**
   * Optional character surrounding a field, one character only, defaults to double quotes.
   */
  quote?: string | boolean | Buffer | null;
  /**
   * Generate two properties raw and row where raw is the original CSV row content and row is the parsed array or object.
   */
  raw?: boolean | null;
  /**
   * One or multiple characters used to delimit record rows; defaults to auto discovery if not provided.
   * Supported auto discovery method are Linux ("\n"), Apple ("\r") and Windows ("\r\n") row delimiters.
   */
  record_delimiter?: string | Buffer | null | (string | Buffer | null)[];
  recordDelimiter?: string | Buffer | null | (string | Buffer | null)[];
  /**
   * Discard inconsistent columns count, default to false.
   */
  relax_column_count?: boolean | null;
  relaxColumnCount?: boolean | null;
  /**
   * Discard inconsistent columns count when the record contains less fields than expected, default to false.
   */
  relax_column_count_less?: boolean | null;
  relaxColumnCountLess?: boolean | null;
  /**
   * Discard inconsistent columns count when the record contains more fields than expected, default to false.
   */
  relax_column_count_more?: boolean | null;
  relaxColumnCountMore?: boolean | null;
  /**
   * Preserve quotes inside unquoted field.
   */
  relax_quotes?: boolean | null;
  relaxQuotes?: boolean | null;
  /**
   * If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  rtrim?: boolean | null;
  /**
   * Don't generate empty values for empty lines.
   * Defaults to false
   */
  skip_empty_lines?: boolean | null;
  skipEmptyLines?: boolean | null;
  /**
   * Don't generate records for lines containing empty column values (column matching /\s*/), defaults to false.
   */
  skip_records_with_empty_values?: boolean | null;
  skipRecordsWithEmptyValues?: boolean | null;
  /**
   * Skip a line with error found inside and directly go process the next line.
   */
  skip_records_with_error?: boolean | null;
  skipRecordsWithError?: boolean | null;
  /**
   * Stop handling records after the requested number of records.
   */
  to?: null | number | string;
  /**
   * Stop handling records after the requested line number.
   */
  to_line?: null | number | string;
  toLine?: null | number | string;
  /**
   * If true, ignore whitespace immediately around the delimiter, defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  trim?: boolean | null;
}
/**
 * Machine-readable error codes attached to {@link CsvError.code}.
 * Callers should match on these codes rather than on error messages.
 */
export type CsvErrorCode =
  | "CSV_INVALID_ARGUMENT"
  | "CSV_INVALID_CLOSING_QUOTE"
  | "CSV_INVALID_COLUMN_DEFINITION"
  | "CSV_INVALID_COLUMN_MAPPING"
  | "CSV_INVALID_OPTION_BOM"
  | "CSV_INVALID_OPTION_CAST"
  | "CSV_INVALID_OPTION_CAST_DATE"
  | "CSV_INVALID_OPTION_COLUMNS"
  | "CSV_INVALID_OPTION_COMMENT"
  | "CSV_INVALID_OPTION_DELIMITER"
  | "CSV_INVALID_OPTION_GROUP_COLUMNS_BY_NAME"
  | "CSV_INVALID_OPTION_ON_RECORD"
  | "CSV_MAX_RECORD_SIZE"
  | "CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE"
  | "CSV_OPTION_COLUMNS_MISSING_NAME"
  | "CSV_QUOTE_NOT_CLOSED"
  | "CSV_RECORD_INCONSISTENT_FIELDS_LENGTH"
  | "CSV_RECORD_INCONSISTENT_COLUMNS"
  | "INVALID_OPENING_QUOTE";
/**
 * Error type thrown/emitted by the parser. Besides `code`, arbitrary
 * context properties (line, column, record, …) are copied onto the
 * instance, hence the index signature.
 */
export class CsvError extends Error {
  /** Stable, machine-readable error identifier. */
  readonly code: CsvErrorCode;
  [key: string]: any;
  constructor(
    code: CsvErrorCode,
    message: string | string[],
    options?: OptionsNormalized,
    ...contexts: unknown[]
  );
}
/**
 * Options variant where `columns` is required and truthy — used to select
 * the overload that yields object records typed as `T`.
 */
type OptionsWithColumns<T> = Omit<Options<T>, "columns"> & {
  columns: Exclude<Options["columns"], undefined | false>;
};
// Overloads: with `columns` the callback receives `T[]` records; without,
// records are `string[][]`. Input and options/callback are each optional.
declare function parse<T = unknown>(
  input: string | Buffer | Uint8Array,
  options: OptionsWithColumns<T>,
  callback?: Callback<T>,
): Parser;
declare function parse(
  input: string | Buffer | Uint8Array,
  options: Options,
  callback?: Callback,
): Parser;
declare function parse<T = unknown>(
  options: OptionsWithColumns<T>,
  callback?: Callback<T>,
): Parser;
declare function parse(options: Options, callback?: Callback): Parser;
declare function parse(
  input: string | Buffer | Uint8Array,
  callback?: Callback,
): Parser;
declare function parse(callback?: Callback): Parser;
// export default parse;
export { parse };
/** Expand user {@link Options} into the internal normalized form. */
declare function normalize_options(opts: Options): OptionsNormalized;
export { normalize_options };

6936
Jira_helper/node_modules/csv-parse/dist/esm/index.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,17 @@
// Type declarations for the Web Streams flavour of the parser: `parse`
// returns a `TransformStream` suitable for `ReadableStream.pipeThrough`.
import { Options } from "./index.js";
declare function parse(options?: Options): TransformStream;
// export default parse;
export { parse };
// Re-export the shared types so consumers need only this entry point.
export {
  CastingContext,
  CastingFunction,
  CastingDateFunction,
  ColumnOption,
  Options,
  OptionsNormalized,
  Info,
  CsvErrorCode,
  CsvError,
} from "./index.js";

30
Jira_helper/node_modules/csv-parse/dist/esm/sync.d.ts generated vendored Normal file
View File

@@ -0,0 +1,30 @@
// Type declarations for the synchronous API: `parse` consumes the whole
// input and returns the records array directly (no stream, no callback).
import { Options } from "./index.js";
/** Options variant with a required, truthy `columns` (object records). */
type OptionsWithColumns<T> = Omit<Options<T>, "columns"> & {
  columns: Exclude<Options["columns"], undefined | false>;
};
declare function parse<T = unknown>(
  input: Buffer | string | Uint8Array,
  options: OptionsWithColumns<T>,
): T[];
declare function parse(
  input: Buffer | string | Uint8Array,
  options: Options,
): string[][];
declare function parse(input: Buffer | string | Uint8Array): string[][];
// export default parse;
export { parse };
// Re-export the shared types so consumers need only this entry point.
export {
  CastingContext,
  CastingFunction,
  CastingDateFunction,
  ColumnOption,
  Options,
  OptionsNormalized,
  Info,
  CsvErrorCode,
  CsvError,
} from "./index.js";

3755
Jira_helper/node_modules/csv-parse/dist/esm/sync.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

6946
Jira_helper/node_modules/csv-parse/dist/iife/index.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

3763
Jira_helper/node_modules/csv-parse/dist/iife/sync.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

6947
Jira_helper/node_modules/csv-parse/dist/umd/index.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

3764
Jira_helper/node_modules/csv-parse/dist/umd/sync.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

22
Jira_helper/node_modules/csv-parse/lib/api/CsvError.js generated vendored Normal file
View File

@@ -0,0 +1,22 @@
/**
 * Parser error carrying a machine-readable `code` plus arbitrary context
 * properties copied onto the instance.
 *
 * @param {string} code - Stable error identifier (e.g. "CSV_QUOTE_NOT_CLOSED").
 * @param {string|string[]} message - Message, or fragments joined with spaces.
 * @param {object} options - Normalized parser options; `options.encoding` is
 *   used to stringify Buffer-valued context properties.
 * @param {...object} contexts - Objects whose enumerable properties are
 *   snapshotted (deep-copied) onto the error instance.
 */
class CsvError extends Error {
  constructor(code, message, options, ...contexts) {
    // Accept message fragments as an array and collapse them to one string.
    super(Array.isArray(message) ? message.join(" ").trim() : message);
    if (Error.captureStackTrace !== undefined) {
      Error.captureStackTrace(this, CsvError);
    }
    this.code = code;
    // Snapshot every context property so later parser-state mutation cannot
    // alter the error after the fact.
    for (const context of contexts) {
      for (const key in context) {
        const value = context[key];
        if (Buffer.isBuffer(value)) {
          this[key] = value.toString(options.encoding);
        } else if (value == null) {
          this[key] = value;
        } else {
          this[key] = JSON.parse(JSON.stringify(value));
        }
      }
    }
  }
}
export { CsvError };

916
Jira_helper/node_modules/csv-parse/lib/api/index.js generated vendored Normal file
View File

@@ -0,0 +1,916 @@
import { normalize_columns_array } from "./normalize_columns_array.js";
import { init_state } from "./init_state.js";
import { normalize_options } from "./normalize_options.js";
import { CsvError } from "./CsvError.js";
// Report whether every field of a record is "empty": null/undefined, or a
// value whose string form is only whitespace. Any other field (including 0
// or false) makes the record non-empty.
const isRecordEmpty = function (record) {
  for (const field of record) {
    if (field == null) continue;
    if (field.toString && field.toString().trim() === "") continue;
    return false;
  }
  return true;
};
const cr = 13; // `\r`, carriage return, 0x0D in hexadecimal, 13 in decimal
const nl = 10; // `\n`, newline, 0x0A in hexadecimal, 10 in decimal
// Byte-order-mark signatures recognized at the start of the input, keyed by
// the encoding they imply.
const boms = {
  // Note, the following are equal:
  // Buffer.from("\ufeff")
  // Buffer.from([239, 187, 191])
  // Buffer.from('EFBBBF', 'hex')
  utf8: Buffer.from([239, 187, 191]),
  // Note, the following are equal:
  // Buffer.from("\ufeff", 'utf16le')
  // Buffer.from([255, 254])
  utf16le: Buffer.from([255, 254]),
};
/**
 * Factory for the stream-agnostic parser core. Returns an engine object
 * holding normalized options, mutable parsing state and an `info` counter
 * block; callers feed it buffers through `parse(nextBuf, end, push, close)`
 * and receive records through the `push` callback. The byte-level state
 * machine below is order-sensitive — do not reorder its checks.
 */
const transform = function (original_options = {}) {
  const info = {
    bytes: 0,
    comment_lines: 0,
    empty_lines: 0,
    invalid_field_length: 0,
    lines: 1,
    records: 0,
  };
  const options = normalize_options(original_options);
  return {
    info: info,
    original_options: original_options,
    options: options,
    state: init_state(options),
    // Decide whether parsing must pause at position `i` and wait for more
    // input: near the end of the buffer, multi-byte tokens (record
    // delimiter, escape+quote, quote+record delimiter) could straddle the
    // chunk boundary.
    __needMoreData: function (i, bufLen, end) {
      if (end) return false;
      const { encoding, escape, quote } = this.options;
      const { quoting, needMoreDataSize, recordDelimiterMaxLength } =
        this.state;
      const numOfCharLeft = bufLen - i - 1;
      const requiredLength = Math.max(
        needMoreDataSize,
        // Skip if the remaining buffer smaller than record delimiter
        // If "record_delimiter" is yet to be discovered:
        // 1. It is equals to `[]` and "recordDelimiterMaxLength" equals `0`
        // 2. We set the length to windows line ending in the current encoding
        // Note, that encoding is known from user or bom discovery at that point
        // recordDelimiterMaxLength,
        recordDelimiterMaxLength === 0
          ? Buffer.from("\r\n", encoding).length
          : recordDelimiterMaxLength,
        // Skip if remaining buffer can be an escaped quote
        quoting ? (escape === null ? 0 : escape.length) + quote.length : 0,
        // Skip if remaining buffer can be record delimiter following the closing quote
        quoting ? quote.length + recordDelimiterMaxLength : 0,
      );
      return numOfCharLeft < requiredLength;
    },
    // Central parser implementation: scans `nextBuf` byte by byte, emitting
    // fields/records via __onField/__onRecord and stashing any incomplete
    // tail into state.previousBuf for the next call.
    parse: function (nextBuf, end, push, close) {
      const {
        bom,
        comment_no_infix,
        encoding,
        from_line,
        ltrim,
        max_record_size,
        raw,
        relax_quotes,
        rtrim,
        skip_empty_lines,
        to,
        to_line,
      } = this.options;
      // `let` (not const): these may be re-resolved after BOM discovery or
      // record-delimiter auto-detection below.
      let { comment, escape, quote, record_delimiter } = this.options;
      const { bomSkipped, previousBuf, rawBuffer, escapeIsQuote } = this.state;
      let buf;
      if (previousBuf === undefined) {
        if (nextBuf === undefined) {
          // Handle empty string
          close();
          return;
        } else {
          buf = nextBuf;
        }
      } else if (previousBuf !== undefined && nextBuf === undefined) {
        buf = previousBuf;
      } else {
        buf = Buffer.concat([previousBuf, nextBuf]);
      }
      // Handle UTF BOM
      if (bomSkipped === false) {
        if (bom === false) {
          this.state.bomSkipped = true;
        } else if (buf.length < 3) {
          // Not enough data
          if (end === false) {
            // Wait for more data
            this.state.previousBuf = buf;
            return;
          }
        } else {
          for (const encoding in boms) {
            if (boms[encoding].compare(buf, 0, boms[encoding].length) === 0) {
              // Skip BOM
              const bomLength = boms[encoding].length;
              this.state.bufBytesStart += bomLength;
              buf = buf.slice(bomLength);
              // Renormalize original options with the new encoding
              const options = normalize_options({
                ...this.original_options,
                encoding: encoding,
              });
              // Properties are merged with the existing options instance
              for (const key in options) {
                this.options[key] = options[key];
              }
              // Options will re-evaluate the Buffer with the new encoding
              ({ comment, escape, quote } = this.options);
              break;
            }
          }
          this.state.bomSkipped = true;
        }
      }
      const bufLen = buf.length;
      let pos;
      for (pos = 0; pos < bufLen; pos++) {
        // Ensure we get enough space to look ahead
        // There should be a way to move this out of the loop
        if (this.__needMoreData(pos, bufLen, end)) {
          break;
        }
        if (this.state.wasRowDelimiter === true) {
          this.info.lines++;
          this.state.wasRowDelimiter = false;
        }
        if (to_line !== -1 && this.info.lines > to_line) {
          this.state.stop = true;
          close();
          return;
        }
        // Auto discovery of record_delimiter, unix, mac and windows supported
        if (this.state.quoting === false && record_delimiter.length === 0) {
          const record_delimiterCount = this.__autoDiscoverRecordDelimiter(
            buf,
            pos,
          );
          if (record_delimiterCount) {
            record_delimiter = this.options.record_delimiter;
          }
        }
        const chr = buf[pos];
        if (raw === true) {
          rawBuffer.append(chr);
        }
        if (
          (chr === cr || chr === nl) &&
          this.state.wasRowDelimiter === false
        ) {
          this.state.wasRowDelimiter = true;
        }
        // Previous char was a valid escape char
        // treat the current char as a regular char
        if (this.state.escaping === true) {
          this.state.escaping = false;
        } else {
          // Escape is only active inside quoted fields
          // We are quoting, the char is an escape chr and there is a chr to escape
          // if(escape !== null && this.state.quoting === true && chr === escape && pos + 1 < bufLen){
          if (
            escape !== null &&
            this.state.quoting === true &&
            this.__isEscape(buf, pos, chr) &&
            pos + escape.length < bufLen
          ) {
            if (escapeIsQuote) {
              if (this.__isQuote(buf, pos + escape.length)) {
                this.state.escaping = true;
                pos += escape.length - 1;
                continue;
              }
            } else {
              this.state.escaping = true;
              pos += escape.length - 1;
              continue;
            }
          }
          // Not currently escaping and chr is a quote
          // TODO: need to compare bytes instead of single char
          if (this.state.commenting === false && this.__isQuote(buf, pos)) {
            if (this.state.quoting === true) {
              const nextChr = buf[pos + quote.length];
              const isNextChrTrimable =
                rtrim && this.__isCharTrimable(buf, pos + quote.length);
              const isNextChrComment =
                comment !== null &&
                this.__compareBytes(comment, buf, pos + quote.length, nextChr);
              const isNextChrDelimiter = this.__isDelimiter(
                buf,
                pos + quote.length,
                nextChr,
              );
              const isNextChrRecordDelimiter =
                record_delimiter.length === 0
                  ? this.__autoDiscoverRecordDelimiter(buf, pos + quote.length)
                  : this.__isRecordDelimiter(nextChr, buf, pos + quote.length);
              // Escape a quote
              // Treat next char as a regular character
              if (
                escape !== null &&
                this.__isEscape(buf, pos, chr) &&
                this.__isQuote(buf, pos + escape.length)
              ) {
                pos += escape.length - 1;
              } else if (
                !nextChr ||
                isNextChrDelimiter ||
                isNextChrRecordDelimiter ||
                isNextChrComment ||
                isNextChrTrimable
              ) {
                this.state.quoting = false;
                this.state.wasQuoting = true;
                pos += quote.length - 1;
                continue;
              } else if (relax_quotes === false) {
                const err = this.__error(
                  new CsvError(
                    "CSV_INVALID_CLOSING_QUOTE",
                    [
                      "Invalid Closing Quote:",
                      `got "${String.fromCharCode(nextChr)}"`,
                      `at line ${this.info.lines}`,
                      "instead of delimiter, record delimiter, trimable character",
                      "(if activated) or comment",
                    ],
                    this.options,
                    this.__infoField(),
                  ),
                );
                if (err !== undefined) return err;
              } else {
                this.state.quoting = false;
                this.state.wasQuoting = true;
                this.state.field.prepend(quote);
                pos += quote.length - 1;
              }
            } else {
              if (this.state.field.length !== 0) {
                // In relax_quotes mode, treat opening quote preceded by chrs as regular
                if (relax_quotes === false) {
                  const info = this.__infoField();
                  const bom = Object.keys(boms)
                    .map((b) =>
                      boms[b].equals(this.state.field.toString()) ? b : false,
                    )
                    .filter(Boolean)[0];
                  const err = this.__error(
                    new CsvError(
                      "INVALID_OPENING_QUOTE",
                      [
                        "Invalid Opening Quote:",
                        `a quote is found on field ${JSON.stringify(info.column)} at line ${info.lines}, value is ${JSON.stringify(this.state.field.toString(encoding))}`,
                        bom ? `(${bom} bom)` : undefined,
                      ],
                      this.options,
                      info,
                      {
                        field: this.state.field,
                      },
                    ),
                  );
                  if (err !== undefined) return err;
                }
              } else {
                this.state.quoting = true;
                pos += quote.length - 1;
                continue;
              }
            }
          }
          if (this.state.quoting === false) {
            const recordDelimiterLength = this.__isRecordDelimiter(
              chr,
              buf,
              pos,
            );
            if (recordDelimiterLength !== 0) {
              // Do not emit comments which take a full line
              const skipCommentLine =
                this.state.commenting &&
                this.state.wasQuoting === false &&
                this.state.record.length === 0 &&
                this.state.field.length === 0;
              if (skipCommentLine) {
                this.info.comment_lines++;
                // Skip full comment line
              } else {
                // Activate records emission if above from_line
                if (
                  this.state.enabled === false &&
                  this.info.lines +
                    (this.state.wasRowDelimiter === true ? 1 : 0) >=
                    from_line
                ) {
                  this.state.enabled = true;
                  this.__resetField();
                  this.__resetRecord();
                  pos += recordDelimiterLength - 1;
                  continue;
                }
                // Skip if line is empty and skip_empty_lines activated
                if (
                  skip_empty_lines === true &&
                  this.state.wasQuoting === false &&
                  this.state.record.length === 0 &&
                  this.state.field.length === 0
                ) {
                  this.info.empty_lines++;
                  pos += recordDelimiterLength - 1;
                  continue;
                }
                this.info.bytes = this.state.bufBytesStart + pos;
                const errField = this.__onField();
                if (errField !== undefined) return errField;
                this.info.bytes =
                  this.state.bufBytesStart + pos + recordDelimiterLength;
                const errRecord = this.__onRecord(push);
                if (errRecord !== undefined) return errRecord;
                if (to !== -1 && this.info.records >= to) {
                  this.state.stop = true;
                  close();
                  return;
                }
              }
              this.state.commenting = false;
              pos += recordDelimiterLength - 1;
              continue;
            }
            if (this.state.commenting) {
              continue;
            }
            if (
              comment !== null &&
              (comment_no_infix === false ||
                (this.state.record.length === 0 &&
                  this.state.field.length === 0))
            ) {
              const commentCount = this.__compareBytes(comment, buf, pos, chr);
              if (commentCount !== 0) {
                this.state.commenting = true;
                continue;
              }
            }
            const delimiterLength = this.__isDelimiter(buf, pos, chr);
            if (delimiterLength !== 0) {
              this.info.bytes = this.state.bufBytesStart + pos;
              const errField = this.__onField();
              if (errField !== undefined) return errField;
              pos += delimiterLength - 1;
              continue;
            }
          }
        }
        if (this.state.commenting === false) {
          if (
            max_record_size !== 0 &&
            this.state.record_length + this.state.field.length > max_record_size
          ) {
            return this.__error(
              new CsvError(
                "CSV_MAX_RECORD_SIZE",
                [
                  "Max Record Size:",
                  "record exceed the maximum number of tolerated bytes",
                  `of ${max_record_size}`,
                  `at line ${this.info.lines}`,
                ],
                this.options,
                this.__infoField(),
              ),
            );
          }
        }
        const lappend =
          ltrim === false ||
          this.state.quoting === true ||
          this.state.field.length !== 0 ||
          !this.__isCharTrimable(buf, pos);
        // rtrim in non quoting is handled in __onField
        const rappend = rtrim === false || this.state.wasQuoting === false;
        if (lappend === true && rappend === true) {
          this.state.field.append(chr);
        } else if (rtrim === true && !this.__isCharTrimable(buf, pos)) {
          return this.__error(
            new CsvError(
              "CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE",
              [
                "Invalid Closing Quote:",
                "found non trimable byte after quote",
                `at line ${this.info.lines}`,
              ],
              this.options,
              this.__infoField(),
            ),
          );
        } else {
          if (lappend === false) {
            pos += this.__isCharTrimable(buf, pos) - 1;
          }
          continue;
        }
      }
      if (end === true) {
        // Ensure we are not ending in a quoting state
        if (this.state.quoting === true) {
          const err = this.__error(
            new CsvError(
              "CSV_QUOTE_NOT_CLOSED",
              [
                "Quote Not Closed:",
                `the parsing is finished with an opening quote at line ${this.info.lines}`,
              ],
              this.options,
              this.__infoField(),
            ),
          );
          if (err !== undefined) return err;
        } else {
          // Skip last line if it has no characters
          if (
            this.state.wasQuoting === true ||
            this.state.record.length !== 0 ||
            this.state.field.length !== 0
          ) {
            this.info.bytes = this.state.bufBytesStart + pos;
            const errField = this.__onField();
            if (errField !== undefined) return errField;
            const errRecord = this.__onRecord(push);
            if (errRecord !== undefined) return errRecord;
          } else if (this.state.wasRowDelimiter === true) {
            this.info.empty_lines++;
          } else if (this.state.commenting === true) {
            this.info.comment_lines++;
          }
        }
      } else {
        this.state.bufBytesStart += pos;
        this.state.previousBuf = buf.slice(pos);
      }
      if (this.state.wasRowDelimiter === true) {
        this.info.lines++;
        this.state.wasRowDelimiter = false;
      }
    },
    // Finalize the accumulated record: header handling, column-count
    // validation, object conversion, and delivery through __push.
    __onRecord: function (push) {
      const {
        columns,
        group_columns_by_name,
        encoding,
        info,
        from,
        relax_column_count,
        relax_column_count_less,
        relax_column_count_more,
        raw,
        skip_records_with_empty_values,
      } = this.options;
      const { enabled, record } = this.state;
      if (enabled === false) {
        return this.__resetRecord();
      }
      // Convert the first line into column names
      const recordLength = record.length;
      if (columns === true) {
        if (skip_records_with_empty_values === true && isRecordEmpty(record)) {
          this.__resetRecord();
          return;
        }
        return this.__firstLineToColumns(record);
      }
      if (columns === false && this.info.records === 0) {
        this.state.expectedRecordLength = recordLength;
      }
      if (recordLength !== this.state.expectedRecordLength) {
        const err =
          columns === false
            ? new CsvError(
                "CSV_RECORD_INCONSISTENT_FIELDS_LENGTH",
                [
                  "Invalid Record Length:",
                  `expect ${this.state.expectedRecordLength},`,
                  `got ${recordLength} on line ${this.info.lines}`,
                ],
                this.options,
                this.__infoField(),
                {
                  record: record,
                },
              )
            : new CsvError(
                "CSV_RECORD_INCONSISTENT_COLUMNS",
                [
                  "Invalid Record Length:",
                  `columns length is ${columns.length},`, // rename columns
                  `got ${recordLength} on line ${this.info.lines}`,
                ],
                this.options,
                this.__infoField(),
                {
                  record: record,
                },
              );
        if (
          relax_column_count === true ||
          (relax_column_count_less === true &&
            recordLength < this.state.expectedRecordLength) ||
          (relax_column_count_more === true &&
            recordLength > this.state.expectedRecordLength)
        ) {
          this.info.invalid_field_length++;
          this.state.error = err;
          // Error is undefined with skip_records_with_error
        } else {
          const finalErr = this.__error(err);
          if (finalErr) return finalErr;
        }
      }
      if (skip_records_with_empty_values === true && isRecordEmpty(record)) {
        this.__resetRecord();
        return;
      }
      if (this.state.recordHasError === true) {
        this.__resetRecord();
        this.state.recordHasError = false;
        return;
      }
      this.info.records++;
      if (from === 1 || this.info.records >= from) {
        const { objname } = this.options;
        // With columns, records are object
        if (columns !== false) {
          const obj = {};
          // Transform record array to an object
          for (let i = 0, l = record.length; i < l; i++) {
            if (columns[i] === undefined || columns[i].disabled) continue;
            // Turn duplicate columns into an array
            if (
              group_columns_by_name === true &&
              obj[columns[i].name] !== undefined
            ) {
              if (Array.isArray(obj[columns[i].name])) {
                obj[columns[i].name] = obj[columns[i].name].concat(record[i]);
              } else {
                obj[columns[i].name] = [obj[columns[i].name], record[i]];
              }
            } else {
              obj[columns[i].name] = record[i];
            }
          }
          // Without objname (default)
          if (raw === true || info === true) {
            const extRecord = Object.assign(
              { record: obj },
              raw === true
                ? { raw: this.state.rawBuffer.toString(encoding) }
                : {},
              info === true ? { info: this.__infoRecord() } : {},
            );
            const err = this.__push(
              objname === undefined ? extRecord : [obj[objname], extRecord],
              push,
            );
            if (err) {
              return err;
            }
          } else {
            const err = this.__push(
              objname === undefined ? obj : [obj[objname], obj],
              push,
            );
            if (err) {
              return err;
            }
          }
          // Without columns, records are array
        } else {
          if (raw === true || info === true) {
            const extRecord = Object.assign(
              { record: record },
              raw === true
                ? { raw: this.state.rawBuffer.toString(encoding) }
                : {},
              info === true ? { info: this.__infoRecord() } : {},
            );
            const err = this.__push(
              objname === undefined ? extRecord : [record[objname], extRecord],
              push,
            );
            if (err) {
              return err;
            }
          } else {
            const err = this.__push(
              objname === undefined ? record : [record[objname], record],
              push,
            );
            if (err) {
              return err;
            }
          }
        }
      }
      this.__resetRecord();
    },
    // Interpret the first record as column headers (columns === true),
    // optionally through the user-supplied header callback.
    __firstLineToColumns: function (record) {
      const { firstLineToHeaders } = this.state;
      try {
        const headers =
          firstLineToHeaders === undefined
            ? record
            : firstLineToHeaders.call(null, record);
        if (!Array.isArray(headers)) {
          return this.__error(
            new CsvError(
              "CSV_INVALID_COLUMN_MAPPING",
              [
                "Invalid Column Mapping:",
                "expect an array from column function,",
                `got ${JSON.stringify(headers)}`,
              ],
              this.options,
              this.__infoField(),
              {
                headers: headers,
              },
            ),
          );
        }
        const normalizedHeaders = normalize_columns_array(headers);
        this.state.expectedRecordLength = normalizedHeaders.length;
        this.options.columns = normalizedHeaders;
        this.__resetRecord();
        return;
      } catch (err) {
        return err;
      }
    },
    // Clear per-record state (raw buffer, pending error, fields).
    __resetRecord: function () {
      if (this.options.raw === true) {
        this.state.rawBuffer.reset();
      }
      this.state.error = undefined;
      this.state.record = [];
      this.state.record_length = 0;
    },
    // Finalize the current field: decode, right-trim, cast, and append it
    // to the record under construction.
    __onField: function () {
      const { cast, encoding, rtrim, max_record_size } = this.options;
      const { enabled, wasQuoting } = this.state;
      // Short circuit for the from_line options
      if (enabled === false) {
        return this.__resetField();
      }
      let field = this.state.field.toString(encoding);
      if (rtrim === true && wasQuoting === false) {
        field = field.trimRight();
      }
      if (cast === true) {
        const [err, f] = this.__cast(field);
        if (err !== undefined) return err;
        field = f;
      }
      this.state.record.push(field);
      // Increment record length if record size must not exceed a limit
      if (max_record_size !== 0 && typeof field === "string") {
        this.state.record_length += field.length;
      }
      this.__resetField();
    },
    // Clear per-field state.
    __resetField: function () {
      this.state.field.reset();
      this.state.wasQuoting = false;
    },
    // Deliver one record, letting the user `on_record` hook alter or drop
    // it first; a thrown hook error is returned, not raised.
    __push: function (record, push) {
      const { on_record } = this.options;
      if (on_record !== undefined) {
        const info = this.__infoRecord();
        try {
          record = on_record.call(null, record, info);
        } catch (err) {
          return err;
        }
        if (record === undefined || record === null) {
          return;
        }
      }
      push(record);
    },
    // Return a tuple with the error and the casted value
    __cast: function (field) {
      const { columns, relax_column_count } = this.options;
      const isColumns = Array.isArray(columns);
      // Don't lose time calling cast
      // because the final record is an object
      // and this field can't be associated to a key present in columns
      if (
        isColumns === true &&
        relax_column_count &&
        this.options.columns.length <= this.state.record.length
      ) {
        return [undefined, undefined];
      }
      if (this.state.castField !== null) {
        try {
          const info = this.__infoField();
          return [undefined, this.state.castField.call(null, field, info)];
        } catch (err) {
          return [err];
        }
      }
      if (this.__isFloat(field)) {
        return [undefined, parseFloat(field)];
      } else if (this.options.cast_date !== false) {
        const info = this.__infoField();
        return [undefined, this.options.cast_date.call(null, field, info)];
      }
      return [undefined, field];
    },
    // Helper to test if a character is a space or a line delimiter;
    // returns the byte length of the matched trim character, 0 otherwise.
    __isCharTrimable: function (buf, pos) {
      const isTrim = (buf, pos) => {
        const { timchars } = this.state;
        loop1: for (let i = 0; i < timchars.length; i++) {
          const timchar = timchars[i];
          for (let j = 0; j < timchar.length; j++) {
            if (timchar[j] !== buf[pos + j]) continue loop1;
          }
          return timchar.length;
        }
        return 0;
      };
      return isTrim(buf, pos);
    },
    // Keep it in case we implement the `cast_int` option
    // __isInt(value){
    //   // return Number.isInteger(parseInt(value))
    //   // return !isNaN( parseInt( obj ) );
    //   return /^(\-|\+)?[1-9][0-9]*$/.test(value)
    // }
    __isFloat: function (value) {
      return value - parseFloat(value) + 1 >= 0; // Borrowed from jquery
    },
    // Compare `sourceBuf` against `targetBuf` at `targetPos`; returns the
    // matched length or 0 (the `firstByte` argument is a fast pre-check).
    __compareBytes: function (sourceBuf, targetBuf, targetPos, firstByte) {
      if (sourceBuf[0] !== firstByte) return 0;
      const sourceLength = sourceBuf.length;
      for (let i = 1; i < sourceLength; i++) {
        if (sourceBuf[i] !== targetBuf[targetPos + i]) return 0;
      }
      return sourceLength;
    },
    // Return the byte length of the field delimiter at `pos`, or 0;
    // honours the ignore_last_delimiters option.
    __isDelimiter: function (buf, pos, chr) {
      const { delimiter, ignore_last_delimiters } = this.options;
      if (
        ignore_last_delimiters === true &&
        this.state.record.length === this.options.columns.length - 1
      ) {
        return 0;
      } else if (
        ignore_last_delimiters !== false &&
        typeof ignore_last_delimiters === "number" &&
        this.state.record.length === ignore_last_delimiters - 1
      ) {
        return 0;
      }
      loop1: for (let i = 0; i < delimiter.length; i++) {
        const del = delimiter[i];
        if (del[0] === chr) {
          for (let j = 1; j < del.length; j++) {
            if (del[j] !== buf[pos + j]) continue loop1;
          }
          return del.length;
        }
      }
      return 0;
    },
    // Return the byte length of the record delimiter at `pos`, or 0.
    __isRecordDelimiter: function (chr, buf, pos) {
      const { record_delimiter } = this.options;
      const recordDelimiterLength = record_delimiter.length;
      loop1: for (let i = 0; i < recordDelimiterLength; i++) {
        const rd = record_delimiter[i];
        const rdLength = rd.length;
        if (rd[0] !== chr) {
          continue;
        }
        for (let j = 1; j < rdLength; j++) {
          if (rd[j] !== buf[pos + j]) {
            continue loop1;
          }
        }
        return rd.length;
      }
      return 0;
    },
    // True when the bytes at `pos` match the escape sequence.
    __isEscape: function (buf, pos, chr) {
      const { escape } = this.options;
      if (escape === null) return false;
      const l = escape.length;
      if (escape[0] === chr) {
        for (let i = 0; i < l; i++) {
          if (escape[i] !== buf[pos + i]) {
            return false;
          }
        }
        return true;
      }
      return false;
    },
    // True when the bytes at `pos` match the quote sequence.
    __isQuote: function (buf, pos) {
      const { quote } = this.options;
      if (quote === null) return false;
      const l = quote.length;
      for (let i = 0; i < l; i++) {
        if (quote[i] !== buf[pos + i]) {
          return false;
        }
      }
      return true;
    },
    // Try "\r\n", "\n", "\r" (in that order) at `pos`; on match, register
    // it as the record delimiter and return its length, else 0.
    __autoDiscoverRecordDelimiter: function (buf, pos) {
      const { encoding } = this.options;
      // Note, we don't need to cache this information in state,
      // It is only called on the first line until we find out a suitable
      // record delimiter.
      const rds = [
        // Important, the windows line ending must be before mac os 9
        Buffer.from("\r\n", encoding),
        Buffer.from("\n", encoding),
        Buffer.from("\r", encoding),
      ];
      loop: for (let i = 0; i < rds.length; i++) {
        const l = rds[i].length;
        for (let j = 0; j < l; j++) {
          if (rds[i][j] !== buf[pos + j]) {
            continue loop;
          }
        }
        this.options.record_delimiter.push(rds[i]);
        this.state.recordDelimiterMaxLength = rds[i].length;
        return rds[i].length;
      }
      return 0;
    },
    // Route an error: with skip_records_with_error, flag the record and
    // notify on_skip (returning undefined); otherwise return the error so
    // the caller can surface it.
    __error: function (msg) {
      const { encoding, raw, skip_records_with_error } = this.options;
      const err = typeof msg === "string" ? new Error(msg) : msg;
      if (skip_records_with_error) {
        this.state.recordHasError = true;
        if (this.options.on_skip !== undefined) {
          try {
            this.options.on_skip(
              err,
              raw ? this.state.rawBuffer.toString(encoding) : undefined,
            );
          } catch (err) {
            return err;
          }
        }
        // this.emit('skip', err, raw ? this.state.rawBuffer.toString(encoding) : undefined);
        return undefined;
      } else {
        return err;
      }
    },
    // Snapshot of counters plus resolved columns.
    __infoDataSet: function () {
      return {
        ...this.info,
        columns: this.options.columns,
      };
    },
    // Record-level info snapshot (adds error/header/index/raw).
    __infoRecord: function () {
      const { columns, raw, encoding } = this.options;
      return {
        ...this.__infoDataSet(),
        error: this.state.error,
        header: columns === true,
        index: this.state.record.length,
        raw: raw ? this.state.rawBuffer.toString(encoding) : undefined,
      };
    },
    // Field-level info snapshot (adds column name/index and quoting flag).
    __infoField: function () {
      const { columns } = this.options;
      const isColumns = Array.isArray(columns);
      return {
        ...this.__infoRecord(),
        column:
          isColumns === true
            ? columns.length > this.state.record.length
              ? columns[this.state.record.length].name
              : null
            : this.state.record.length,
        quoting: this.state.wasQuoting,
      };
    },
  };
};
export { transform, CsvError };

View File

@@ -0,0 +1,68 @@
import ResizeableBuffer from "../utils/ResizeableBuffer.js";
// white space characters
// https://en.wikipedia.org/wiki/Whitespace_character
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions/Character_Classes#Types
// \f\n\r\t\v\u00a0\u1680\u2000-\u200a\u2028\u2029\u202f\u205f\u3000\ufeff
const np = 12; // `\f`, form feed ("new page"), 0x0C in hexadecimal, 12 in decimal
const cr = 13; // `\r`, carriage return, 0x0D in hexadecimal, 13 in decimal
const nl = 10; // `\n`, newline, 0x0A in hexadecimal, 10 in decimal
const space = 32; // ` `, space, 0x20 in hexadecimal, 32 in decimal
const tab = 9; // `\t`, horizontal tab, 0x09 in hexadecimal, 9 in decimal
/**
 * Build the initial mutable parser state derived from normalized options.
 * @param {Object} options - normalized parser options (snake_case keys,
 *   buffers already encoded) as produced by `normalize_options`.
 * @returns {Object} fresh state object consumed by the parser internals.
 */
const init_state = function (options) {
  // `escape` and `quote` may be the same byte sequence (e.g. `""` escaping).
  const escapeIsQuote =
    Buffer.isBuffer(options.escape) &&
    Buffer.isBuffer(options.quote) &&
    Buffer.compare(options.escape, options.quote) === 0;
  // Largest lookahead required before a decision can be made at a buffer
  // edge: comment marker, any delimiter, or the quote sequence.
  const needMoreDataSize = Math.max(
    options.comment !== null ? options.comment.length : 0,
    ...options.delimiter.map((delimiter) => delimiter.length),
    options.quote !== null ? options.quote.length : 0,
  );
  return {
    bomSkipped: false,
    bufBytesStart: 0,
    castField: options.cast_function,
    commenting: false,
    // Current error encountered by a record
    error: undefined,
    enabled: options.from_line === 1,
    escaping: false,
    escapeIsQuote: escapeIsQuote,
    // `columns` can be `false`, `true` or an `Array`
    expectedRecordLength: Array.isArray(options.columns)
      ? options.columns.length
      : undefined,
    field: new ResizeableBuffer(20),
    firstLineToHeaders: options.cast_first_line_to_header,
    needMoreDataSize: needMoreDataSize,
    previousBuf: undefined,
    quoting: false,
    stop: false,
    rawBuffer: new ResizeableBuffer(100),
    record: [],
    recordHasError: false,
    record_length: 0,
    recordDelimiterMaxLength:
      options.record_delimiter.length === 0
        ? 0
        : Math.max(...options.record_delimiter.map((v) => v.length)),
    trimChars: [
      Buffer.from(" ", options.encoding)[0],
      Buffer.from("\t", options.encoding)[0],
    ],
    wasQuoting: false,
    wasRowDelimiter: false,
    // Whitespace byte sequences used when trimming, in the target encoding.
    timchars: [
      Buffer.from(Buffer.from([cr], "utf8").toString(), options.encoding),
      Buffer.from(Buffer.from([nl], "utf8").toString(), options.encoding),
      Buffer.from(Buffer.from([np], "utf8").toString(), options.encoding),
      Buffer.from(Buffer.from([space], "utf8").toString(), options.encoding),
      Buffer.from(Buffer.from([tab], "utf8").toString(), options.encoding),
    ],
  };
};
export { init_state };

View File

@@ -0,0 +1,32 @@
import { CsvError } from "./CsvError.js";
import { is_object } from "../utils/is_object.js";
/**
 * Normalize a user supplied `columns` array into descriptor objects of the
 * form `{ name: string }` or `{ disabled: true }`.
 * @param {Array} columns - column definitions: strings, literal objects
 *   with a `name`, or nullish/false placeholders for discarded columns.
 * @returns {Array<Object>} normalized column descriptors.
 * @throws {CsvError} when an entry is neither a string nor a literal
 *   object carrying a string `name` property.
 */
const normalize_columns_array = function (columns) {
  const normalizedColumns = [];
  const count = columns.length;
  for (let i = 0; i < count; i++) {
    const column = columns[i];
    if (column === undefined || column === null || column === false) {
      // Placeholder entry: the column is parsed but its value is discarded.
      normalizedColumns[i] = { disabled: true };
    } else if (typeof column === "string") {
      normalizedColumns[i] = { name: column };
    } else if (is_object(column)) {
      if (typeof column.name !== "string") {
        throw new CsvError("CSV_OPTION_COLUMNS_MISSING_NAME", [
          "Option columns missing name:",
          `property "name" is required at position ${i}`,
          "when column is an object literal",
        ]);
      }
      normalizedColumns[i] = column;
    } else {
      throw new CsvError("CSV_INVALID_COLUMN_DEFINITION", [
        "Invalid column definition:",
        "expect a string or a literal object,",
        `got ${JSON.stringify(column)} at position ${i}`,
      ]);
    }
  }
  return normalizedColumns;
};
export { normalize_columns_array };

View File

@@ -0,0 +1,691 @@
import { normalize_columns_array } from "./normalize_columns_array.js";
import { CsvError } from "./CsvError.js";
import { underscore } from "../utils/underscore.js";
/**
 * Validate and normalize user supplied parser options.
 *
 * Camel-cased option names are converted to their underscored form, every
 * option is validated, defaults are applied, and char/string options are
 * converted to buffers in the requested encoding.
 *
 * @param {Object} opts - raw user options (camelCase or snake_case keys).
 * @returns {Object} fully normalized options object.
 * @throws {CsvError|Error} when an option value is invalid.
 */
const normalize_options = function (opts) {
const options = {};
// Merge with user options
// (camelCase keys are renamed to their snake_case form by `underscore`)
for (const opt in opts) {
options[underscore(opt)] = opts[opt];
}
// Normalize option `encoding`
// Note: defined first because other options depends on it
// to convert chars/strings into buffers.
if (options.encoding === undefined || options.encoding === true) {
options.encoding = "utf8";
} else if (options.encoding === null || options.encoding === false) {
options.encoding = null;
} else if (
typeof options.encoding !== "string" &&
options.encoding !== null
) {
throw new CsvError(
"CSV_INVALID_OPTION_ENCODING",
[
"Invalid option encoding:",
"encoding must be a string or null to return a buffer,",
`got ${JSON.stringify(options.encoding)}`,
],
options,
);
}
// Normalize option `bom`
if (
options.bom === undefined ||
options.bom === null ||
options.bom === false
) {
options.bom = false;
} else if (options.bom !== true) {
throw new CsvError(
"CSV_INVALID_OPTION_BOM",
[
"Invalid option bom:",
"bom must be true,",
`got ${JSON.stringify(options.bom)}`,
],
options,
);
}
// Normalize option `cast`
options.cast_function = null;
if (
options.cast === undefined ||
options.cast === null ||
options.cast === false ||
options.cast === ""
) {
options.cast = undefined;
} else if (typeof options.cast === "function") {
options.cast_function = options.cast;
options.cast = true;
} else if (options.cast !== true) {
throw new CsvError(
"CSV_INVALID_OPTION_CAST",
[
"Invalid option cast:",
"cast must be true or a function,",
`got ${JSON.stringify(options.cast)}`,
],
options,
);
}
// Normalize option `cast_date`
if (
options.cast_date === undefined ||
options.cast_date === null ||
options.cast_date === false ||
options.cast_date === ""
) {
options.cast_date = false;
} else if (options.cast_date === true) {
// Default date caster: rely on Date.parse, keep the original string
// when the value is not a parsable date.
options.cast_date = function (value) {
const date = Date.parse(value);
return !isNaN(date) ? new Date(date) : value;
};
} else if (typeof options.cast_date !== "function") {
throw new CsvError(
"CSV_INVALID_OPTION_CAST_DATE",
[
"Invalid option cast_date:",
"cast_date must be true or a function,",
`got ${JSON.stringify(options.cast_date)}`,
],
options,
);
}
// Normalize option `columns`
options.cast_first_line_to_header = undefined;
if (options.columns === true) {
// Fields in the first line are converted as-is to columns
options.cast_first_line_to_header = undefined;
} else if (typeof options.columns === "function") {
options.cast_first_line_to_header = options.columns;
options.columns = true;
} else if (Array.isArray(options.columns)) {
options.columns = normalize_columns_array(options.columns);
} else if (
options.columns === undefined ||
options.columns === null ||
options.columns === false
) {
options.columns = false;
} else {
throw new CsvError(
"CSV_INVALID_OPTION_COLUMNS",
[
"Invalid option columns:",
"expect an array, a function or true,",
`got ${JSON.stringify(options.columns)}`,
],
options,
);
}
// Normalize option `group_columns_by_name`
if (
options.group_columns_by_name === undefined ||
options.group_columns_by_name === null ||
options.group_columns_by_name === false
) {
options.group_columns_by_name = false;
} else if (options.group_columns_by_name !== true) {
throw new CsvError(
"CSV_INVALID_OPTION_GROUP_COLUMNS_BY_NAME",
[
"Invalid option group_columns_by_name:",
"expect an boolean,",
`got ${JSON.stringify(options.group_columns_by_name)}`,
],
options,
);
} else if (options.columns === false) {
throw new CsvError(
"CSV_INVALID_OPTION_GROUP_COLUMNS_BY_NAME",
[
"Invalid option group_columns_by_name:",
"the `columns` mode must be activated.",
],
options,
);
}
// Normalize option `comment`
if (
options.comment === undefined ||
options.comment === null ||
options.comment === false ||
options.comment === ""
) {
options.comment = null;
} else {
if (typeof options.comment === "string") {
options.comment = Buffer.from(options.comment, options.encoding);
}
if (!Buffer.isBuffer(options.comment)) {
throw new CsvError(
"CSV_INVALID_OPTION_COMMENT",
[
"Invalid option comment:",
"comment must be a buffer or a string,",
`got ${JSON.stringify(options.comment)}`,
],
options,
);
}
}
// Normalize option `comment_no_infix`
if (
options.comment_no_infix === undefined ||
options.comment_no_infix === null ||
options.comment_no_infix === false
) {
options.comment_no_infix = false;
} else if (options.comment_no_infix !== true) {
throw new CsvError(
"CSV_INVALID_OPTION_COMMENT",
[
"Invalid option comment_no_infix:",
"value must be a boolean,",
`got ${JSON.stringify(options.comment_no_infix)}`,
],
options,
);
}
// Normalize option `delimiter`
// Stringified before normalization so error messages show the user input.
const delimiter_json = JSON.stringify(options.delimiter);
if (!Array.isArray(options.delimiter))
options.delimiter = [options.delimiter];
if (options.delimiter.length === 0) {
throw new CsvError(
"CSV_INVALID_OPTION_DELIMITER",
[
"Invalid option delimiter:",
"delimiter must be a non empty string or buffer or array of string|buffer,",
`got ${delimiter_json}`,
],
options,
);
}
options.delimiter = options.delimiter.map(function (delimiter) {
if (delimiter === undefined || delimiter === null || delimiter === false) {
return Buffer.from(",", options.encoding);
}
if (typeof delimiter === "string") {
delimiter = Buffer.from(delimiter, options.encoding);
}
if (!Buffer.isBuffer(delimiter) || delimiter.length === 0) {
throw new CsvError(
"CSV_INVALID_OPTION_DELIMITER",
[
"Invalid option delimiter:",
"delimiter must be a non empty string or buffer or array of string|buffer,",
`got ${delimiter_json}`,
],
options,
);
}
return delimiter;
});
// Normalize option `escape`
if (options.escape === undefined || options.escape === true) {
options.escape = Buffer.from('"', options.encoding);
} else if (typeof options.escape === "string") {
options.escape = Buffer.from(options.escape, options.encoding);
} else if (options.escape === null || options.escape === false) {
options.escape = null;
}
if (options.escape !== null) {
if (!Buffer.isBuffer(options.escape)) {
throw new Error(
`Invalid Option: escape must be a buffer, a string or a boolean, got ${JSON.stringify(options.escape)}`,
);
}
}
// Normalize option `from`
if (options.from === undefined || options.from === null) {
options.from = 1;
} else {
// NOTE(review): /\d+/ matches any digit substring (e.g. "2x" parses as
// 2, "x" is rejected) — confirm this lenient parsing is intended.
if (typeof options.from === "string" && /\d+/.test(options.from)) {
options.from = parseInt(options.from);
}
if (Number.isInteger(options.from)) {
if (options.from < 0) {
throw new Error(
`Invalid Option: from must be a positive integer, got ${JSON.stringify(opts.from)}`,
);
}
} else {
throw new Error(
`Invalid Option: from must be an integer, got ${JSON.stringify(options.from)}`,
);
}
}
// Normalize option `from_line`
if (options.from_line === undefined || options.from_line === null) {
options.from_line = 1;
} else {
if (
typeof options.from_line === "string" &&
/\d+/.test(options.from_line)
) {
options.from_line = parseInt(options.from_line);
}
if (Number.isInteger(options.from_line)) {
if (options.from_line <= 0) {
throw new Error(
`Invalid Option: from_line must be a positive integer greater than 0, got ${JSON.stringify(opts.from_line)}`,
);
}
} else {
throw new Error(
`Invalid Option: from_line must be an integer, got ${JSON.stringify(opts.from_line)}`,
);
}
}
// Normalize options `ignore_last_delimiters`
if (
options.ignore_last_delimiters === undefined ||
options.ignore_last_delimiters === null
) {
options.ignore_last_delimiters = false;
} else if (typeof options.ignore_last_delimiters === "number") {
options.ignore_last_delimiters = Math.floor(options.ignore_last_delimiters);
if (options.ignore_last_delimiters === 0) {
options.ignore_last_delimiters = false;
}
} else if (typeof options.ignore_last_delimiters !== "boolean") {
throw new CsvError(
"CSV_INVALID_OPTION_IGNORE_LAST_DELIMITERS",
[
"Invalid option `ignore_last_delimiters`:",
"the value must be a boolean value or an integer,",
`got ${JSON.stringify(options.ignore_last_delimiters)}`,
],
options,
);
}
if (options.ignore_last_delimiters === true && options.columns === false) {
throw new CsvError(
"CSV_IGNORE_LAST_DELIMITERS_REQUIRES_COLUMNS",
[
"The option `ignore_last_delimiters`",
"requires the activation of the `columns` option",
],
options,
);
}
// Normalize option `info`
if (
options.info === undefined ||
options.info === null ||
options.info === false
) {
options.info = false;
} else if (options.info !== true) {
throw new Error(
`Invalid Option: info must be true, got ${JSON.stringify(options.info)}`,
);
}
// Normalize option `max_record_size`
if (
options.max_record_size === undefined ||
options.max_record_size === null ||
options.max_record_size === false
) {
options.max_record_size = 0;
} else if (
Number.isInteger(options.max_record_size) &&
options.max_record_size >= 0
) {
// Great, nothing to do
} else if (
typeof options.max_record_size === "string" &&
/\d+/.test(options.max_record_size)
) {
options.max_record_size = parseInt(options.max_record_size);
} else {
throw new Error(
`Invalid Option: max_record_size must be a positive integer, got ${JSON.stringify(options.max_record_size)}`,
);
}
// Normalize option `objname`
if (
options.objname === undefined ||
options.objname === null ||
options.objname === false
) {
options.objname = undefined;
} else if (Buffer.isBuffer(options.objname)) {
if (options.objname.length === 0) {
throw new Error(`Invalid Option: objname must be a non empty buffer`);
}
if (options.encoding === null) {
// Don't call `toString`, leave objname as a buffer
} else {
options.objname = options.objname.toString(options.encoding);
}
} else if (typeof options.objname === "string") {
if (options.objname.length === 0) {
throw new Error(`Invalid Option: objname must be a non empty string`);
}
// Great, nothing to do
} else if (typeof options.objname === "number") {
// if(options.objname.length === 0){
//   throw new Error(`Invalid Option: objname must be a non empty string`);
// }
// Great, nothing to do
} else {
throw new Error(
`Invalid Option: objname must be a string or a buffer, got ${options.objname}`,
);
}
// A numeric objname indexes record positions and excludes `columns`;
// a string/buffer objname names a column and requires `columns`.
if (options.objname !== undefined) {
if (typeof options.objname === "number") {
if (options.columns !== false) {
throw Error(
"Invalid Option: objname index cannot be combined with columns or be defined as a field",
);
}
} else {
// A string or a buffer
if (options.columns === false) {
throw Error(
"Invalid Option: objname field must be combined with columns or be defined as an index",
);
}
}
}
// Normalize option `on_record`
if (options.on_record === undefined || options.on_record === null) {
options.on_record = undefined;
} else if (typeof options.on_record !== "function") {
throw new CsvError(
"CSV_INVALID_OPTION_ON_RECORD",
[
"Invalid option `on_record`:",
"expect a function,",
`got ${JSON.stringify(options.on_record)}`,
],
options,
);
}
// Normalize option `on_skip`
// options.on_skip ??= (err, chunk) => {
//   this.emit('skip', err, chunk);
// };
if (
options.on_skip !== undefined &&
options.on_skip !== null &&
typeof options.on_skip !== "function"
) {
throw new Error(
`Invalid Option: on_skip must be a function, got ${JSON.stringify(options.on_skip)}`,
);
}
// Normalize option `quote`
if (
options.quote === null ||
options.quote === false ||
options.quote === ""
) {
options.quote = null;
} else {
if (options.quote === undefined || options.quote === true) {
options.quote = Buffer.from('"', options.encoding);
} else if (typeof options.quote === "string") {
options.quote = Buffer.from(options.quote, options.encoding);
}
if (!Buffer.isBuffer(options.quote)) {
throw new Error(
`Invalid Option: quote must be a buffer or a string, got ${JSON.stringify(options.quote)}`,
);
}
}
// Normalize option `raw`
if (
options.raw === undefined ||
options.raw === null ||
options.raw === false
) {
options.raw = false;
} else if (options.raw !== true) {
throw new Error(
`Invalid Option: raw must be true, got ${JSON.stringify(options.raw)}`,
);
}
// Normalize option `record_delimiter`
// An empty array triggers auto-discovery on the first line.
if (options.record_delimiter === undefined) {
options.record_delimiter = [];
} else if (
typeof options.record_delimiter === "string" ||
Buffer.isBuffer(options.record_delimiter)
) {
if (options.record_delimiter.length === 0) {
throw new CsvError(
"CSV_INVALID_OPTION_RECORD_DELIMITER",
[
"Invalid option `record_delimiter`:",
"value must be a non empty string or buffer,",
`got ${JSON.stringify(options.record_delimiter)}`,
],
options,
);
}
options.record_delimiter = [options.record_delimiter];
} else if (!Array.isArray(options.record_delimiter)) {
throw new CsvError(
"CSV_INVALID_OPTION_RECORD_DELIMITER",
[
"Invalid option `record_delimiter`:",
"value must be a string, a buffer or array of string|buffer,",
`got ${JSON.stringify(options.record_delimiter)}`,
],
options,
);
}
options.record_delimiter = options.record_delimiter.map(function (rd, i) {
if (typeof rd !== "string" && !Buffer.isBuffer(rd)) {
throw new CsvError(
"CSV_INVALID_OPTION_RECORD_DELIMITER",
[
"Invalid option `record_delimiter`:",
"value must be a string, a buffer or array of string|buffer",
`at index ${i},`,
`got ${JSON.stringify(rd)}`,
],
options,
);
} else if (rd.length === 0) {
throw new CsvError(
"CSV_INVALID_OPTION_RECORD_DELIMITER",
[
"Invalid option `record_delimiter`:",
"value must be a non empty string or buffer",
`at index ${i},`,
`got ${JSON.stringify(rd)}`,
],
options,
);
}
if (typeof rd === "string") {
rd = Buffer.from(rd, options.encoding);
}
return rd;
});
// Normalize option `relax_column_count`
if (typeof options.relax_column_count === "boolean") {
// Great, nothing to do
} else if (
options.relax_column_count === undefined ||
options.relax_column_count === null
) {
options.relax_column_count = false;
} else {
throw new Error(
`Invalid Option: relax_column_count must be a boolean, got ${JSON.stringify(options.relax_column_count)}`,
);
}
if (typeof options.relax_column_count_less === "boolean") {
// Great, nothing to do
} else if (
options.relax_column_count_less === undefined ||
options.relax_column_count_less === null
) {
options.relax_column_count_less = false;
} else {
throw new Error(
`Invalid Option: relax_column_count_less must be a boolean, got ${JSON.stringify(options.relax_column_count_less)}`,
);
}
if (typeof options.relax_column_count_more === "boolean") {
// Great, nothing to do
} else if (
options.relax_column_count_more === undefined ||
options.relax_column_count_more === null
) {
options.relax_column_count_more = false;
} else {
throw new Error(
`Invalid Option: relax_column_count_more must be a boolean, got ${JSON.stringify(options.relax_column_count_more)}`,
);
}
// Normalize option `relax_quotes`
if (typeof options.relax_quotes === "boolean") {
// Great, nothing to do
} else if (
options.relax_quotes === undefined ||
options.relax_quotes === null
) {
options.relax_quotes = false;
} else {
throw new Error(
`Invalid Option: relax_quotes must be a boolean, got ${JSON.stringify(options.relax_quotes)}`,
);
}
// Normalize option `skip_empty_lines`
if (typeof options.skip_empty_lines === "boolean") {
// Great, nothing to do
} else if (
options.skip_empty_lines === undefined ||
options.skip_empty_lines === null
) {
options.skip_empty_lines = false;
} else {
throw new Error(
`Invalid Option: skip_empty_lines must be a boolean, got ${JSON.stringify(options.skip_empty_lines)}`,
);
}
// Normalize option `skip_records_with_empty_values`
if (typeof options.skip_records_with_empty_values === "boolean") {
// Great, nothing to do
} else if (
options.skip_records_with_empty_values === undefined ||
options.skip_records_with_empty_values === null
) {
options.skip_records_with_empty_values = false;
} else {
throw new Error(
`Invalid Option: skip_records_with_empty_values must be a boolean, got ${JSON.stringify(options.skip_records_with_empty_values)}`,
);
}
// Normalize option `skip_records_with_error`
if (typeof options.skip_records_with_error === "boolean") {
// Great, nothing to do
} else if (
options.skip_records_with_error === undefined ||
options.skip_records_with_error === null
) {
options.skip_records_with_error = false;
} else {
throw new Error(
`Invalid Option: skip_records_with_error must be a boolean, got ${JSON.stringify(options.skip_records_with_error)}`,
);
}
// Normalize option `rtrim`
if (
options.rtrim === undefined ||
options.rtrim === null ||
options.rtrim === false
) {
options.rtrim = false;
} else if (options.rtrim !== true) {
throw new Error(
`Invalid Option: rtrim must be a boolean, got ${JSON.stringify(options.rtrim)}`,
);
}
// Normalize option `ltrim`
if (
options.ltrim === undefined ||
options.ltrim === null ||
options.ltrim === false
) {
options.ltrim = false;
} else if (options.ltrim !== true) {
throw new Error(
`Invalid Option: ltrim must be a boolean, got ${JSON.stringify(options.ltrim)}`,
);
}
// Normalize option `trim`
if (
options.trim === undefined ||
options.trim === null ||
options.trim === false
) {
options.trim = false;
} else if (options.trim !== true) {
throw new Error(
`Invalid Option: trim must be a boolean, got ${JSON.stringify(options.trim)}`,
);
}
// Normalize options `trim`, `ltrim` and `rtrim`
// `trim` implies both sides unless the user explicitly disabled one side
// in the original (pre-merge) `opts`.
if (options.trim === true && opts.ltrim !== false) {
options.ltrim = true;
} else if (options.ltrim !== true) {
options.ltrim = false;
}
if (options.trim === true && opts.rtrim !== false) {
options.rtrim = true;
} else if (options.rtrim !== true) {
options.rtrim = false;
}
// Normalize option `to`
if (options.to === undefined || options.to === null) {
options.to = -1;
} else if (options.to !== -1) {
if (typeof options.to === "string" && /\d+/.test(options.to)) {
options.to = parseInt(options.to);
}
if (Number.isInteger(options.to)) {
if (options.to <= 0) {
throw new Error(
`Invalid Option: to must be a positive integer greater than 0, got ${JSON.stringify(opts.to)}`,
);
}
} else {
throw new Error(
`Invalid Option: to must be an integer, got ${JSON.stringify(opts.to)}`,
);
}
}
// Normalize option `to_line`
if (options.to_line === undefined || options.to_line === null) {
options.to_line = -1;
} else if (options.to_line !== -1) {
if (typeof options.to_line === "string" && /\d+/.test(options.to_line)) {
options.to_line = parseInt(options.to_line);
}
if (Number.isInteger(options.to_line)) {
if (options.to_line <= 0) {
throw new Error(
`Invalid Option: to_line must be a positive integer greater than 0, got ${JSON.stringify(opts.to_line)}`,
);
}
} else {
throw new Error(
`Invalid Option: to_line must be an integer, got ${JSON.stringify(opts.to_line)}`,
);
}
}
return options;
};
export { normalize_options };

507
Jira_helper/node_modules/csv-parse/lib/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,507 @@
// Original definitions in https://github.com/DefinitelyTyped/DefinitelyTyped by: David Muller <https://github.com/davidm77>
/// <reference types="node" />
import * as stream from "stream";
/**
 * User callback invoked once parsing completes: receives the error (if
 * any), the parsed records, and an optional info snapshot.
 */
export type Callback<T = string[]> = (
err: CsvError | undefined,
records: T[],
info?: Info,
) => void;
// export interface Parser extends stream.Transform {}
// export class Parser<T> extends stream.Transform {
/** Streaming CSV parser exposed as a Node.js Transform stream. */
export class Parser extends stream.Transform {
constructor(options: Options);
// __push(line: T): CsvError | undefined;
__push(line: any): CsvError | undefined;
// __write(chars: any, end: any, callback: any): any;
/** Normalized options the parser was constructed with. */
readonly options: OptionsNormalized;
/** Live counters describing parsing progress. */
readonly info: Info;
}
/** Counters and metadata describing the progress of a parsing run. */
export interface Info {
/**
 * Count the number of lines being fully commented.
 */
readonly comment_lines: number;
/**
 * Count the number of processed empty lines.
 */
readonly empty_lines: number;
/**
 * The number of lines encountered in the source dataset, start at 1 for the first line.
 */
readonly lines: number;
/**
 * Count the number of processed records.
 */
readonly records: number;
/**
 * Count of the number of processed bytes.
 */
readonly bytes: number;
/**
 * Number of non uniform records when `relax_column_count` is true.
 */
readonly invalid_field_length: number;
/**
 * Normalized version of `options.columns` when `options.columns` is true, boolean otherwise.
 */
readonly columns: boolean | { name: string }[] | { disabled: true }[];
}
/** Context object passed to casting functions and `on_record` callbacks. */
export interface CastingContext {
readonly column: number | string;
readonly empty_lines: number;
readonly error: CsvError;
readonly header: boolean;
readonly index: number;
readonly quoting: boolean;
readonly lines: number;
readonly raw: string | undefined;
readonly records: number;
readonly invalid_field_length: number;
}
/** User function converting a raw string field into an arbitrary value. */
export type CastingFunction = (
value: string,
context: CastingContext,
) => unknown;
/** User function converting a raw string field into a Date. */
export type CastingDateFunction = (
value: string,
context: CastingContext,
) => Date;
/**
 * A single column definition: a name, a `{ name }` object literal, or a
 * nullish/false placeholder for a parsed-but-discarded column.
 */
export type ColumnOption<K = string> =
| K
| undefined
| null
| false
| { name: K };
/**
 * Parser options after normalization: defaults applied, camelCase keys
 * renamed, and char/string options converted to buffers.
 */
export interface OptionsNormalized<T = string[]> {
auto_parse?: boolean | CastingFunction;
auto_parse_date?: boolean | CastingDateFunction;
/**
 * If true, detect and exclude the byte order mark (BOM) from the CSV input if present.
 */
bom?: boolean;
/**
 * If true, the parser will attempt to convert input string to native types.
 * If a function, receive the value as first argument, a context as second argument and return a new value. More information about the context properties is available below.
 */
cast?: boolean | CastingFunction;
/**
 * If true, the parser will attempt to convert input string to dates.
 * If a function, receive the value as argument and return a new value. It requires the "auto_parse" option. Be careful, it relies on Date.parse.
 */
cast_date?: boolean | CastingDateFunction;
/**
 * Internal property string the function to
 */
cast_first_line_to_header?: (
record: T,
) => ColumnOption<
T extends string[] ? string : T extends unknown ? string : keyof T
>[];
/**
 * List of fields as an array, a user defined callback accepting the first
 * line and returning the column names or true if autodiscovered in the first
 * CSV line, default to null, affect the result data set in the sense that
 * records will be objects instead of arrays.
 */
columns:
| boolean
| ColumnOption<
T extends string[] ? string : T extends unknown ? string : keyof T
>[];
/**
 * Convert values into an array of values when columns are activated and
 * when multiple columns of the same name are found.
 */
group_columns_by_name: boolean;
/**
 * Treat all the characters after this one as a comment, default to '' (disabled).
 */
comment: string | null;
/**
 * Restrict the definition of comments to a full line. Comment characters
 * defined in the middle of the line are not interpreted as such. The
 * option require the activation of comments.
 */
comment_no_infix: boolean;
/**
 * Set the field delimiter. One character only, defaults to comma.
 */
delimiter: Buffer[];
/**
 * Set the source and destination encoding, a value of `null` returns buffer instead of strings.
 */
encoding: BufferEncoding | null;
/**
 * Set the escape character, one character only, defaults to double quotes.
 */
escape: null | Buffer;
/**
 * Start handling records from the requested number of records.
 */
from: number;
/**
 * Start handling records from the requested line number.
 */
from_line: number;
/**
 * Don't interpret delimiters as such in the last field according to the number of fields calculated from the number of columns, the option require the presence of the `column` option when `true`.
 */
ignore_last_delimiters: boolean | number;
/**
 * Generate two properties `info` and `record` where `info` is a snapshot of the info object at the time the record was created and `record` is the parsed array or object.
 */
info: boolean;
/**
 * If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
 * Does not remove whitespace in a quoted field.
 */
ltrim: boolean;
/**
 * Maximum number of characters to be contained in the field and line buffers before an exception is raised,
 * used to guard against a wrong delimiter or record_delimiter,
 * default to 128000 characters.
 */
max_record_size: number;
/**
 * Name of header-record title to name objects by.
 */
objname: number | string | undefined;
/**
 * Alter and filter records by executing a user defined function.
 */
on_record?: (record: T, context: CastingContext) => T | undefined;
/**
 * Optional character surrounding a field, one character only, defaults to double quotes.
 */
quote?: Buffer | null;
/**
 * Generate two properties raw and row where raw is the original CSV row content and row is the parsed array or object.
 */
raw: boolean;
/**
 * Discard inconsistent columns count, default to false.
 */
relax_column_count: boolean;
/**
 * Discard inconsistent columns count when the record contains less fields than expected, default to false.
 */
relax_column_count_less: boolean;
/**
 * Discard inconsistent columns count when the record contains more fields than expected, default to false.
 */
relax_column_count_more: boolean;
/**
 * Preserve quotes inside unquoted field.
 */
relax_quotes: boolean;
/**
 * One or multiple characters used to delimit record rows; defaults to auto discovery if not provided.
 * Supported auto discovery method are Linux ("\n"), Apple ("\r") and Windows ("\r\n") row delimiters.
 */
record_delimiter: Buffer[];
/**
 * If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
 * Does not remove whitespace in a quoted field.
 */
rtrim: boolean;
/**
 * Don't generate empty values for empty lines.
 * Defaults to false
 */
skip_empty_lines: boolean;
/**
 * Skip a line with error found inside and directly go process the next line.
 */
skip_records_with_error: boolean;
/**
 * Don't generate records for lines containing empty column values (column matching /\s*/), defaults to false.
 */
skip_records_with_empty_values: boolean;
/**
 * Stop handling records after the requested number of records.
 */
to: number;
/**
 * Stop handling records after the requested line number.
 */
to_line: number;
/**
 * If true, ignore whitespace immediately around the delimiter, defaults to false.
 * Does not remove whitespace in a quoted field.
 */
trim: boolean;
}
/*
Note, could not `extends stream.TransformOptions` because encoding can be
BufferEncoding and undefined as well as null which is not defined in the
extended type.
*/
/**
 * User-facing parser options. Most snake_case options also accept a
 * camelCase spelling (declared side by side below); both forms are
 * reduced to one normalized representation by `normalize_options`.
 */
export interface Options<T = string[]> {
  /**
   * If true, the parser will attempt to convert read data types to native types.
   * @deprecated Use {@link cast}
   */
  auto_parse?: boolean | CastingFunction;
  autoParse?: boolean | CastingFunction;
  /**
   * If true, the parser will attempt to convert read data types to dates. It requires the "auto_parse" option.
   * @deprecated Use {@link cast_date}
   */
  auto_parse_date?: boolean | CastingDateFunction;
  autoParseDate?: boolean | CastingDateFunction;
  /**
   * If true, detect and exclude the byte order mark (BOM) from the CSV input if present.
   */
  bom?: boolean;
  /**
   * If true, the parser will attempt to convert input string to native types.
   * If a function, receive the value as first argument, a context as second argument and return a new value. More information about the context properties is available below.
   */
  cast?: boolean | CastingFunction;
  /**
   * If true, the parser will attempt to convert input string to dates.
   * If a function, receive the value as argument and return a new value. It requires the "auto_parse" option. Be careful, it relies on Date.parse.
   */
  cast_date?: boolean | CastingDateFunction;
  castDate?: boolean | CastingDateFunction;
  /**
   * List of fields as an array,
   * a user defined callback accepting the first line and returning the column names or true if autodiscovered in the first CSV line,
   * default to null,
   * affect the result data set in the sense that records will be objects instead of arrays.
   */
  columns?:
    | boolean
    | ColumnOption<
        T extends string[] ? string : T extends unknown ? string : keyof T
      >[]
    | ((
        record: T,
      ) => ColumnOption<
        T extends string[] ? string : T extends unknown ? string : keyof T
      >[]);
  /**
   * Convert values into an array of values when columns are activated and
   * when multiple columns of the same name are found.
   */
  group_columns_by_name?: boolean;
  groupColumnsByName?: boolean;
  /**
   * Treat all the characters after this one as a comment, default to '' (disabled).
   */
  comment?: string | boolean | null;
  /**
   * Restrict the definition of comments to a full line. Comment characters
   * defined in the middle of the line are not interpreted as such. The
   * option requires the activation of comments.
   */
  comment_no_infix?: boolean | null;
  /**
   * Set the field delimiter. One character only, defaults to comma.
   */
  delimiter?: string | string[] | Buffer;
  /**
   * Set the source and destination encoding, a value of `null` returns buffer instead of strings.
   */
  encoding?: BufferEncoding | boolean | null | undefined;
  /**
   * Set the escape character, one character only, defaults to double quotes.
   */
  escape?: string | null | boolean | Buffer;
  /**
   * Start handling records from the requested number of records.
   */
  from?: number | string;
  /**
   * Start handling records from the requested line number.
   */
  from_line?: null | number | string;
  fromLine?: null | number | string;
  /**
   * Don't interpret delimiters as such in the last field according to the number of fields calculated from the number of columns, the option requires the presence of the `columns` option when `true`.
   */
  ignore_last_delimiters?: boolean | number;
  /**
   * Generate two properties `info` and `record` where `info` is a snapshot of the info object at the time the record was created and `record` is the parsed array or object.
   */
  info?: boolean;
  /**
   * If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  ltrim?: boolean | null;
  /**
   * Maximum number of characters to be contained in the field and line buffers before an exception is raised,
   * used to guard against a wrong delimiter or record_delimiter,
   * default to 128000 characters.
   */
  max_record_size?: number | null | string;
  maxRecordSize?: number;
  /**
   * Name of header-record title to name objects by.
   */
  objname?: Buffer | null | number | string;
  /**
   * Alter and filter records by executing a user defined function.
   */
  on_record?: (record: T, context: CastingContext) => T | null | undefined;
  onRecord?: (record: T, context: CastingContext) => T | null | undefined;
  /**
   * Function called when an error occurred if the `skip_records_with_error`
   * option is activated.
   */
  on_skip?: (err: CsvError | undefined, raw: string | undefined) => undefined;
  onSkip?: (err: CsvError | undefined, raw: string | undefined) => undefined;
  /**
   * Optional character surrounding a field, one character only, defaults to double quotes.
   */
  quote?: string | boolean | Buffer | null;
  /**
   * Generate two properties raw and row where raw is the original CSV row content and row is the parsed array or object.
   */
  raw?: boolean | null;
  /**
   * One or multiple characters used to delimit record rows; defaults to auto discovery if not provided.
   * Supported auto discovery methods are Linux ("\n"), Apple ("\r") and Windows ("\r\n") row delimiters.
   */
  record_delimiter?: string | Buffer | null | (string | Buffer | null)[];
  recordDelimiter?: string | Buffer | null | (string | Buffer | null)[];
  /**
   * Discard inconsistent columns count, default to false.
   */
  relax_column_count?: boolean | null;
  relaxColumnCount?: boolean | null;
  /**
   * Discard inconsistent columns count when the record contains less fields than expected, default to false.
   */
  relax_column_count_less?: boolean | null;
  relaxColumnCountLess?: boolean | null;
  /**
   * Discard inconsistent columns count when the record contains more fields than expected, default to false.
   */
  relax_column_count_more?: boolean | null;
  relaxColumnCountMore?: boolean | null;
  /**
   * Preserve quotes inside unquoted field.
   */
  relax_quotes?: boolean | null;
  relaxQuotes?: boolean | null;
  /**
   * If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  rtrim?: boolean | null;
  /**
   * Don't generate empty values for empty lines.
   * Defaults to false
   */
  skip_empty_lines?: boolean | null;
  skipEmptyLines?: boolean | null;
  /**
   * Don't generate records for lines containing empty column values (column matching /\s*/), defaults to false.
   */
  skip_records_with_empty_values?: boolean | null;
  skipRecordsWithEmptyValues?: boolean | null;
  /**
   * Skip a line with error found inside and directly go process the next line.
   */
  skip_records_with_error?: boolean | null;
  skipRecordsWithError?: boolean | null;
  /**
   * Stop handling records after the requested number of records.
   */
  to?: null | number | string;
  /**
   * Stop handling records after the requested line number.
   */
  to_line?: null | number | string;
  toLine?: null | number | string;
  /**
   * If true, ignore whitespace immediately around the delimiter, defaults to false.
   * Does not remove whitespace in a quoted field.
   */
  trim?: boolean | null;
}
/**
 * Machine-readable error codes carried by {@link CsvError} instances,
 * covering invalid user options (CSV_INVALID_OPTION_*) as well as
 * malformed input detected while parsing.
 */
export type CsvErrorCode =
  | "CSV_INVALID_ARGUMENT"
  | "CSV_INVALID_CLOSING_QUOTE"
  | "CSV_INVALID_COLUMN_DEFINITION"
  | "CSV_INVALID_COLUMN_MAPPING"
  | "CSV_INVALID_OPTION_BOM"
  | "CSV_INVALID_OPTION_CAST"
  | "CSV_INVALID_OPTION_CAST_DATE"
  | "CSV_INVALID_OPTION_COLUMNS"
  | "CSV_INVALID_OPTION_COMMENT"
  | "CSV_INVALID_OPTION_DELIMITER"
  | "CSV_INVALID_OPTION_GROUP_COLUMNS_BY_NAME"
  | "CSV_INVALID_OPTION_ON_RECORD"
  | "CSV_MAX_RECORD_SIZE"
  | "CSV_NON_TRIMABLE_CHAR_AFTER_CLOSING_QUOTE"
  | "CSV_OPTION_COLUMNS_MISSING_NAME"
  | "CSV_QUOTE_NOT_CLOSED"
  | "CSV_RECORD_INCONSISTENT_FIELDS_LENGTH"
  | "CSV_RECORD_INCONSISTENT_COLUMNS"
  | "CSV_UNKNOWN_ERROR"
  | "INVALID_OPENING_QUOTE";
/**
 * Error subclass thrown or emitted by the parser; `code` identifies the
 * failure category (see {@link CsvErrorCode}).
 * The index signature allows arbitrary extra properties — presumably
 * populated from the trailing `contexts` arguments; the implementation
 * is not visible here, confirm against CsvError.js.
 */
export class CsvError extends Error {
  readonly code: CsvErrorCode;
  [key: string]: any;
  constructor(
    code: CsvErrorCode,
    message: string | string[],
    options?: OptionsNormalized,
    ...contexts: unknown[]
  );
}
/**
 * Options with a required, truthy `columns` definition — used to select
 * the `parse` overloads whose records are typed as `T` (objects) rather
 * than the default `string[]` rows.
 */
type OptionsWithColumns<T> = Omit<Options<T>, "columns"> & {
  columns: Exclude<Options["columns"], undefined | false>;
};
/**
 * Create a parsing stream. Input, options and completion callback may
 * each be omitted; every overload returns the underlying Parser stream.
 */
declare function parse<T = unknown>(
  input: string | Buffer | Uint8Array,
  options: OptionsWithColumns<T>,
  callback?: Callback<T>,
): Parser;
declare function parse(
  input: string | Buffer | Uint8Array,
  options: Options,
  callback?: Callback,
): Parser;
declare function parse<T = unknown>(
  options: OptionsWithColumns<T>,
  callback?: Callback<T>,
): Parser;
declare function parse(options: Options, callback?: Callback): Parser;
declare function parse(
  input: string | Buffer | Uint8Array,
  callback?: Callback,
): Parser;
declare function parse(callback?: Callback): Parser;
// export default parse;
export { parse };
/** Resolve user-facing options (including camelCase aliases) into the fully-populated normalized form. */
declare function normalize_options(opts: Options): OptionsNormalized;
export { normalize_options };

138
Jira_helper/node_modules/csv-parse/lib/index.js generated vendored Normal file
View File

@@ -0,0 +1,138 @@
/*
CSV Parse
Please look at the [project documentation](https://csv.js.org/parse/) for
additional information.
*/
import { Transform } from "stream";
import { is_object } from "./utils/is_object.js";
import { transform } from "./api/index.js";
import { CsvError } from "./api/CsvError.js";
import { normalize_options } from "./api/normalize_options.js";
/**
 * Node.js Transform stream wrapping the core parser API: the writable
 * side receives raw CSV chunks, the readable side emits parsed records
 * (readableObjectMode).
 */
class Parser extends Transform {
  /**
   * @param {Object} opts - user options, forwarded both to the Transform
   *   constructor and to the parser API; `encoding` is forced to null so
   *   input chunks arrive as buffers.
   */
  constructor(opts = {}) {
    super({ ...{ readableObjectMode: true }, ...opts, encoding: null });
    // Default on_skip re-emits skipped records as a "skip" event; a
    // user-supplied on_skip in `opts` takes precedence (spread after).
    this.api = transform({
      on_skip: (err, chunk) => {
        this.emit("skip", err, chunk);
      },
      ...opts,
    });
    // Backward compatibility
    this.state = this.api.state;
    this.options = this.api.options;
    this.info = this.api.info;
  }
  // Implementation of `Transform._transform`
  _transform(buf, _, callback) {
    // After a fatal error, further chunks are dropped; note the stream
    // callback is not invoked in this case.
    if (this.state.stop === true) {
      return;
    }
    // Feed the chunk to the API; second argument `false` means this is
    // not the end of input. Records are pushed as they are produced.
    const err = this.api.parse(
      buf,
      false,
      (record) => {
        this.push(record);
      },
      () => {
        this.push(null);
        this.end();
        // Fix #333 and break #410
        // ko: api.stream.iterator.coffee
        // ko with v21.4.0, ok with node v20.5.1: api.stream.finished # aborted (with generate())
        // ko: api.stream.finished # aborted (with Readable)
        // this.destroy()
        // Fix #410 and partially break #333
        // ok: api.stream.iterator.coffee
        // ok: api.stream.finished # aborted (with generate())
        // broken: api.stream.finished # aborted (with Readable)
        this.on("end", this.destroy);
      },
    );
    // Any returned error is fatal: stop consuming and propagate it.
    if (err !== undefined) {
      this.state.stop = true;
    }
    callback(err);
  }
  // Implementation of `Transform._flush`
  _flush(callback) {
    if (this.state.stop === true) {
      return;
    }
    // Signal end of input (second argument `true`) so trailing data is
    // flushed as the final record(s).
    const err = this.api.parse(
      undefined,
      true,
      (record) => {
        this.push(record);
      },
      () => {
        this.push(null);
        this.on("end", this.destroy);
      },
    );
    callback(err);
  }
}
/**
 * Callback/stream entry point. Arguments are recognized by type and may
 * be supplied in any combination: an input (string, Buffer or
 * Uint8Array), an options object, and a completion callback invoked as
 * `callback(err, records, info)`.
 * @returns {Parser} the underlying Parser stream.
 * @throws {CsvError} CSV_INVALID_ARGUMENT on an unrecognized argument.
 */
const parse = function (...args) {
  let data, options, callback;
  for (const [i, argument] of args.entries()) {
    if (
      data === undefined &&
      (typeof argument === "string" ||
        Buffer.isBuffer(argument) ||
        // Fix: the declared signatures accept Uint8Array input, but a
        // non-Buffer Uint8Array previously fell through to the options
        // check (is_object returns true for it).
        argument instanceof Uint8Array)
    ) {
      data = argument;
    } else if (options === undefined && is_object(argument)) {
      options = argument;
    } else if (callback === undefined && typeof argument === "function") {
      callback = argument;
    } else {
      throw new CsvError(
        "CSV_INVALID_ARGUMENT",
        ["Invalid argument:", `got ${JSON.stringify(argument)} at index ${i}`],
        options || {},
      );
    }
  }
  const parser = new Parser(options);
  if (callback) {
    // Collect records into an array, or into an object keyed by the
    // first field when the `objname` option is set.
    const records =
      options === undefined || options.objname === undefined ? [] : {};
    parser.on("readable", function () {
      let record;
      while ((record = this.read()) !== null) {
        if (options === undefined || options.objname === undefined) {
          records.push(record);
        } else {
          records[record[0]] = record[1];
        }
      }
    });
    parser.on("error", function (err) {
      callback(err, undefined, parser.api.__infoDataSet());
    });
    parser.on("end", function () {
      callback(undefined, records, parser.api.__infoDataSet());
    });
  }
  if (data !== undefined) {
    // Defer the write so callers can attach listeners first.
    const writer = function () {
      parser.write(data);
      parser.end();
    };
    // Support Deno, Rollup doesn't provide a shim for setImmediate
    if (typeof setImmediate === "function") {
      setImmediate(writer);
    } else {
      setTimeout(writer, 0);
    }
  }
  return parser;
};
// export default parse
export { parse, Parser, CsvError, normalize_options };

17
Jira_helper/node_modules/csv-parse/lib/stream.d.ts generated vendored Normal file
View File

@@ -0,0 +1,17 @@
import { Options } from "./index.js";
/**
 * Create a WHATWG TransformStream parsing incoming CSV chunks into
 * records — the web-streams counterpart of the Node.js Parser.
 */
declare function parse(options?: Options): TransformStream;
// export default parse;
export { parse };
// Re-export the shared types so consumers can import everything from
// this entry point alone.
export {
  CastingContext,
  CastingFunction,
  CastingDateFunction,
  ColumnOption,
  Options,
  OptionsNormalized,
  Info,
  CsvErrorCode,
  CsvError,
} from "./index.js";

36
Jira_helper/node_modules/csv-parse/lib/stream.js generated vendored Normal file
View File

@@ -0,0 +1,36 @@
import { TransformStream, CountQueuingStrategy } from "node:stream/web";
import { transform } from "./api/index.js";
/**
 * Build a WHATWG TransformStream around the core parser API: incoming
 * chunks are parsed and each record is enqueued on the readable side.
 * Both sides use a count-based queuing strategy (high-water mark 1024).
 */
const parse = (opts) => {
  const api = transform(opts);
  let ctl;
  const push = (record) => ctl.enqueue(record);
  const done = () => ctl.terminate();
  // Shared driver for both transform and flush phases.
  const run = (chunk, isLast) => {
    const failure = api.parse(chunk, isLast, push, done);
    if (failure) {
      ctl.error(failure);
    }
  };
  return new TransformStream(
    {
      start(controller) {
        ctl = controller;
      },
      transform(chunk) {
        run(chunk, false);
      },
      flush() {
        run(undefined, true);
      },
    },
    new CountQueuingStrategy({ highWaterMark: 1024 }),
    new CountQueuingStrategy({ highWaterMark: 1024 }),
  );
};
export { parse };

30
Jira_helper/node_modules/csv-parse/lib/sync.d.ts generated vendored Normal file
View File

@@ -0,0 +1,30 @@
import { Options } from "./index.js";
/**
 * Options with a required, truthy `columns` definition — selects the
 * overload whose records are typed as `T` instead of `string[]`.
 */
type OptionsWithColumns<T> = Omit<Options<T>, "columns"> & {
  columns: Exclude<Options["columns"], undefined | false>;
};
/** Parse the whole input synchronously and return all records. */
declare function parse<T = unknown>(
  input: Buffer | string | Uint8Array,
  options: OptionsWithColumns<T>,
): T[];
declare function parse(
  input: Buffer | string | Uint8Array,
  options: Options,
): string[][];
declare function parse(input: Buffer | string | Uint8Array): string[][];
// export default parse;
export { parse };
// Re-export the shared types so consumers can import everything from
// this entry point alone.
export {
  CastingContext,
  CastingFunction,
  CastingDateFunction,
  ColumnOption,
  Options,
  OptionsNormalized,
  Info,
  CsvErrorCode,
  CsvError,
} from "./index.js";

28
Jira_helper/node_modules/csv-parse/lib/sync.js generated vendored Normal file
View File

@@ -0,0 +1,28 @@
import { CsvError, transform } from "./api/index.js";
/**
 * Synchronous entry point: parse the whole input at once and return
 * every record.
 * @param {string|Buffer|Uint8Array} data - CSV input; strings are
 *   converted to a Buffer before parsing.
 * @param {Object} opts - parser options.
 * @returns {Array|Object} array of records, or an object keyed by each
 *   record's first field when the `objname` option is set.
 * @throws {CsvError} on the first parsing error.
 */
const parse = function (data, opts = {}) {
  const input = typeof data === "string" ? Buffer.from(data) : data;
  const records = opts && opts.objname ? {} : [];
  const parser = transform(opts);
  const collect = (record) => {
    if (parser.options.objname === undefined) {
      records.push(record);
    } else {
      records[record[0]] = record[1];
    }
  };
  const noop = () => {};
  // Single end-flagged call; historically (pre 250606) this was two
  // parse() invocations — data first, then an end-of-input marker.
  const failure = parser.parse(input, true, collect, noop);
  if (failure !== undefined) {
    throw failure;
  }
  return records;
};
// export default parse
export { parse };
export { CsvError };

View File

@@ -0,0 +1,63 @@
/**
 * Growable byte buffer used by the parser to accumulate field and row
 * content. Backed by a Buffer whose capacity doubles when exhausted;
 * `length` counts the valid bytes, `size` the current capacity.
 */
class ResizeableBuffer {
  constructor(size = 100) {
    this.size = size;
    this.length = 0;
    this.buf = Buffer.allocUnsafe(size);
  }
  /**
   * Insert `val` (a Buffer, or a single byte value) before the current
   * content.
   * Fix: grow the capacity repeatedly until the prepended buffer fits —
   * the previous implementation resized only once and threw
   * INVALID_BUFFER_STATE when `val` was larger than the doubled capacity.
   */
  prepend(val) {
    if (Buffer.isBuffer(val)) {
      const length = this.length + val.length;
      // Double the capacity as many times as needed to hold everything.
      while (length >= this.size) {
        this.resize();
      }
      const buf = this.buf;
      this.buf = Buffer.allocUnsafe(this.size);
      val.copy(this.buf, 0);
      buf.copy(this.buf, val.length);
      this.length += val.length;
    } else {
      const length = this.length++;
      if (length === this.size) {
        this.resize();
      }
      // Shift the existing bytes right by one and write the byte first.
      const buf = this.clone();
      this.buf[0] = val;
      buf.copy(this.buf, 1, 0, length);
    }
  }
  /** Append a single byte, growing the backing buffer if it is full. */
  append(val) {
    const length = this.length++;
    if (length === this.size) {
      this.resize();
    }
    this.buf[length] = val;
  }
  /** Return a copy of the valid bytes as a plain Buffer. */
  clone() {
    return Buffer.from(this.buf.slice(0, this.length));
  }
  /** Double the capacity, preserving the existing content. */
  resize() {
    const length = this.length;
    this.size = this.size * 2;
    const buf = Buffer.allocUnsafe(this.size);
    this.buf.copy(buf, 0, 0, length);
    this.buf = buf;
  }
  /**
   * With an encoding, decode the valid bytes to a string; without one,
   * return them as a Uint8Array copy.
   */
  toString(encoding) {
    if (encoding) {
      return this.buf.slice(0, this.length).toString(encoding);
    } else {
      return Uint8Array.prototype.slice.call(this.buf.slice(0, this.length));
    }
  }
  toJSON() {
    return this.toString("utf8");
  }
  /** Discard the content without shrinking the backing buffer. */
  reset() {
    this.length = 0;
  }
}
export default ResizeableBuffer;

View File

@@ -0,0 +1,5 @@
// Return true for plain objects and class instances, false for null,
// arrays, and every primitive value.
const is_object = function (obj) {
  if (obj === null || Array.isArray(obj)) {
    return false;
  }
  return typeof obj === "object";
};
export { is_object };

View File

@@ -0,0 +1,7 @@
// Convert a camelCase option name to snake_case by replacing every
// uppercase letter with an underscore followed by its lowercase form.
const underscore = function (str) {
  return str.replace(/([A-Z])/g, (_, letter) => `_${letter.toLowerCase()}`);
};
export { underscore };

144
Jira_helper/node_modules/csv-parse/package.json generated vendored Normal file
View File

@@ -0,0 +1,144 @@
{
"version": "6.1.0",
"name": "csv-parse",
"description": "CSV parsing implementing the Node.js `stream.Transform` API",
"keywords": [
"csv",
"parse",
"parser",
"convert",
"tsv",
"stream",
"backend",
"frontend"
],
"author": "David Worms <david@adaltas.com> (https://www.adaltas.com)",
"contributors": [
"David Worms <david@adaltas.com> (https://www.adaltas.com)",
"Will White (https://github.com/willwhite)",
"Justin Latimer (https://github.com/justinlatimer)",
"jonseymour (https://github.com/jonseymour)",
"pascalopitz (https://github.com/pascalopitz)",
"Josh Pschorr (https://github.com/jpschorr)",
"Elad Ben-Israel (https://github.com/eladb)",
"Philippe Plantier (https://github.com/phipla)",
"Tim Oxley (https://github.com/timoxley)",
"Damon Oehlman (https://github.com/DamonOehlman)",
"Alexandru Topliceanu (https://github.com/topliceanu)",
"Visup (https://github.com/visup)",
"Edmund von der Burg (https://github.com/evdb)",
"Douglas Christopher Wilson (https://github.com/dougwilson)",
"Joe Eaves (https://github.com/Joeasaurus)",
"Mark Stosberg (https://github.com/markstos)"
],
"exports": {
".": {
"import": {
"types": "./lib/index.d.ts",
"default": "./lib/index.js"
},
"require": {
"types": "./dist/cjs/index.d.cts",
"default": "./dist/cjs/index.cjs"
}
},
"./sync": {
"import": {
"types": "./lib/sync.d.ts",
"default": "./lib/sync.js"
},
"require": {
"types": "./dist/cjs/sync.d.cts",
"default": "./dist/cjs/sync.cjs"
}
},
"./stream": {
"import": {
"types": "./lib/stream.d.ts",
"default": "./lib/stream.js"
},
"require": {
"types": "./dist/cjs/stream.d.cts",
"default": "./dist/cjs/stream.cjs"
}
},
"./browser/esm": {
"types": "./dist/esm/index.d.ts",
"default": "./dist/esm/index.js"
},
"./browser/esm/sync": {
"types": "./dist/esm/sync.d.ts",
"default": "./dist/esm/sync.js"
}
},
"devDependencies": {
"@rollup/plugin-node-resolve": "^16.0.1",
"@types/mocha": "^10.0.10",
"@types/node": "^22.15.30",
"csv-generate": "^4.5.0",
"csv-spectrum": "^2.0.0",
"dedent": "^1.6.0",
"each": "^2.7.2",
"mocha": "^11.5.0",
"pad": "^3.3.0",
"prettier": "^3.5.3",
"rollup": "^4.41.1",
"rollup-plugin-node-builtins": "^2.1.2",
"rollup-plugin-node-globals": "^1.4.0",
"should": "^13.2.3",
"stream-transform": "^3.4.0",
"ts-node": "^10.9.2",
"typescript": "^5.8.3"
},
"files": [
"dist",
"lib"
],
"homepage": "https://csv.js.org/parse",
"license": "MIT",
"main": "./dist/cjs/index.cjs",
"mocha": {
"inline-diffs": true,
"loader": "ts-node/esm",
"recursive": true,
"reporter": "spec",
"throw-deprecation": false,
"timeout": 40000
},
"repository": {
"type": "git",
"url": "https://github.com/adaltas/node-csv.git",
"directory": "packages/csv-parse"
},
"scripts": {
"build": "npm run build:rollup && npm run build:ts",
"build:rollup": "npx rollup -c",
"build:ts": "cp lib/index.d.ts dist/cjs/index.d.cts && cp lib/sync.d.ts dist/cjs/sync.d.cts && cp lib/*.ts dist/esm",
"postbuild:ts": "find dist/cjs -name '*.d.cts' -exec sh -c \"sed -i \"s/\\.js'/\\.cjs'/g\" {} || sed -i '' \"s/\\.js'/\\.cjs'/g\" {}\" \\;",
"lint:check": "eslint",
"lint:fix": "eslint --fix",
"lint:ts": "tsc --noEmit true",
"preversion": "npm run build && git add dist",
"test": "mocha 'test/**/*.{js,ts}'",
"test:legacy": "mocha --ignore test/api.web_stream.js --ignore test/api.web_stream.ts --ignore test/api.stream.finished.ts --ignore test/api.stream.iterator.ts 'test/**/*.{js,ts}'"
},
"type": "module",
"types": "dist/esm/index.d.ts",
"typesVersions": {
"*": {
".": [
"dist/esm/index.d.ts"
],
"sync": [
"dist/esm/sync.d.ts"
],
"browser/esm": [
"dist/esm/index.d.ts"
],
"browser/esm/sync": [
"dist/esm/sync.d.ts"
]
}
},
"gitHead": "c0e42c9cf0ec6b9e1f453279e36f621ce782d2eb"
}

20
Jira_helper/node_modules/emoji-regex/LICENSE-MIT.txt generated vendored Normal file
View File

@@ -0,0 +1,20 @@
Copyright Mathias Bynens <https://mathiasbynens.be/>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

107
Jira_helper/node_modules/emoji-regex/README.md generated vendored Normal file
View File

@@ -0,0 +1,107 @@
# emoji-regex [![Build status](https://github.com/mathiasbynens/emoji-regex/actions/workflows/main.yml/badge.svg)](https://github.com/mathiasbynens/emoji-regex/actions/workflows/main.yml) [![emoji-regex on npm](https://img.shields.io/npm/v/emoji-regex)](https://www.npmjs.com/package/emoji-regex)
_emoji-regex_ offers a regular expression to match all emoji symbols and sequences (including textual representations of emoji) as per the Unicode Standard. It's based on [_emoji-test-regex-pattern_](https://github.com/mathiasbynens/emoji-test-regex-pattern), which generates (at build time) the regular expression pattern based on the Unicode Standard. As a result, _emoji-regex_ can easily be updated whenever new emoji are added to Unicode.
Since each version of _emoji-regex_ is tied to the latest Unicode version at the time of release, results are deterministic. This is important for use cases like image replacement, where you want to guarantee that an image asset is available for every possibly matched emoji. If you don't need a deterministic regex, a lighter-weight, general emoji pattern is available via the [_emoji-regex-xs_](https://github.com/slevithan/emoji-regex-xs) package that follows the same API.
## Installation
Via [npm](https://www.npmjs.com/):
```bash
npm install emoji-regex
```
In [Node.js](https://nodejs.org/):
```js
const emojiRegex = require('emoji-regex');
// Note: because the regular expression has the global flag set, this module
// exports a function that returns the regex rather than exporting the regular
// expression itself, to make it impossible to (accidentally) mutate the
// original regular expression.
const text = `
\u{231A}: ⌚ default emoji presentation character (Emoji_Presentation)
\u{2194}\u{FE0F}: ↔️ default text presentation character rendered as emoji
\u{1F469}: 👩 emoji modifier base (Emoji_Modifier_Base)
\u{1F469}\u{1F3FF}: 👩🏿 emoji modifier base followed by a modifier
`;
const regex = emojiRegex();
for (const match of text.matchAll(regex)) {
const emoji = match[0];
console.log(`Matched sequence ${ emoji } — code points: ${ [...emoji].length }`);
}
```
Console output:
```
Matched sequence ⌚ — code points: 1
Matched sequence ⌚ — code points: 1
Matched sequence ↔️ — code points: 2
Matched sequence ↔️ — code points: 2
Matched sequence 👩 — code points: 1
Matched sequence 👩 — code points: 1
Matched sequence 👩🏿 — code points: 2
Matched sequence 👩🏿 — code points: 2
```
## For maintainers
### How to update emoji-regex after new Unicode Standard releases
1. [Update _emoji-test-regex-pattern_ as described in its repository](https://github.com/mathiasbynens/emoji-test-regex-pattern#how-to-update-emoji-test-regex-pattern-after-new-uts51-releases).
1. Bump the _emoji-test-regex-pattern_ dependency to the latest version.
1. Update the Unicode data dependency in `package.json` by running the following commands:
```sh
# Example: updating from Unicode v13 to Unicode v14.
npm uninstall @unicode/unicode-13.0.0
npm install @unicode/unicode-14.0.0 --save-dev
```
1. Generate the new output:
```sh
npm run build
```
1. Verify that tests still pass:
```sh
npm test
```
### How to publish a new release
1. On the `main` branch, bump the emoji-regex version number in `package.json`:
```sh
npm version patch -m 'Release v%s'
```
Instead of `patch`, use `minor` or `major` [as needed](https://semver.org/).
Note that this produces a Git commit + tag.
1. Push the release commit and tag:
```sh
git push && git push --tags
```
Our CI then automatically publishes the new release to npm.
## Author
| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") |
|---|
| [Mathias Bynens](https://mathiasbynens.be/) |
## License
_emoji-regex_ is available under the [MIT](https://mths.be/mit) license.

3
Jira_helper/node_modules/emoji-regex/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,3 @@
declare module 'emoji-regex' {
  /**
   * Return a freshly constructed RegExp matching all emoji symbols and
   * sequences. A new instance is returned on each call so the stateful
   * global-flagged regex cannot be accidentally mutated by callers.
   */
  export default function emojiRegex(): RegExp;
}

4
Jira_helper/node_modules/emoji-regex/index.js generated vendored Normal file

File diff suppressed because one or more lines are too long

4
Jira_helper/node_modules/emoji-regex/index.mjs generated vendored Normal file

File diff suppressed because one or more lines are too long

45
Jira_helper/node_modules/emoji-regex/package.json generated vendored Normal file
View File

@@ -0,0 +1,45 @@
{
"name": "emoji-regex",
"version": "10.5.0",
"description": "A regular expression to match all Emoji-only symbols as per the Unicode Standard.",
"homepage": "https://mths.be/emoji-regex",
"main": "index.js",
"module": "index.mjs",
"types": "index.d.ts",
"keywords": [
"unicode",
"regex",
"regexp",
"regular expressions",
"code points",
"symbols",
"characters",
"emoji"
],
"license": "MIT",
"author": {
"name": "Mathias Bynens",
"url": "https://mathiasbynens.be/"
},
"repository": {
"type": "git",
"url": "https://github.com/mathiasbynens/emoji-regex.git"
},
"bugs": "https://github.com/mathiasbynens/emoji-regex/issues",
"files": [
"LICENSE-MIT.txt",
"index.js",
"index.d.ts",
"index.mjs"
],
"scripts": {
"build": "node script/build.js",
"test": "mocha",
"test:watch": "npm run test -- --watch"
},
"devDependencies": {
"@unicode/unicode-17.0.0": "^1.6.10",
"emoji-test-regex-pattern": "^2.3.0",
"mocha": "^11.7.1"
}
}

22
Jira_helper/node_modules/escalade/dist/index.js generated vendored Normal file
View File

@@ -0,0 +1,22 @@
const { dirname, resolve } = require('path');
const { readdir, stat } = require('fs');
const { promisify } = require('util');
// Promise-based wrappers around the callback-style fs functions.
const toStats = promisify(stat);
const toRead = promisify(readdir);
/**
 * Walk up the directory tree starting at `start` (a file or directory).
 * For each ancestor, `callback(dir, entryNames)` is awaited; a truthy
 * return value stops the walk and is resolved against that directory.
 * Resolves to undefined once the filesystem root is passed without a match.
 */
module.exports = async function (start, callback) {
  let dir = resolve('.', start);
  let tmp, stats = await toStats(dir);
  // If given a file path, begin from its containing directory.
  if (!stats.isDirectory()) {
    dir = dirname(dir);
  }
  while (true) {
    tmp = await callback(dir, await toRead(dir));
    if (tmp) return resolve(dir, tmp);
    // `tmp` captures the previous directory; dirname() is a fixed point
    // at the filesystem root, which terminates the loop.
    dir = dirname(tmp = dir);
    if (tmp === dir) break;
  }
}

22
Jira_helper/node_modules/escalade/dist/index.mjs generated vendored Normal file
View File

@@ -0,0 +1,22 @@
import { dirname, resolve } from 'path';
import { readdir, stat } from 'fs';
import { promisify } from 'util';
// Promise-based wrappers around the callback-style fs functions.
const toStats = promisify(stat);
const toRead = promisify(readdir);
/**
 * Walk up the directory tree starting at `start` (a file or directory).
 * For each ancestor, `callback(dir, entryNames)` is awaited; a truthy
 * return value stops the walk and is resolved against that directory.
 * Resolves to undefined once the filesystem root is passed without a match.
 */
export default async function (start, callback) {
  let dir = resolve('.', start);
  let tmp, stats = await toStats(dir);
  // If given a file path, begin from its containing directory.
  if (!stats.isDirectory()) {
    dir = dirname(dir);
  }
  while (true) {
    tmp = await callback(dir, await toRead(dir));
    if (tmp) return resolve(dir, tmp);
    // `tmp` captures the previous directory; dirname() is a fixed point
    // at the filesystem root, which terminates the loop.
    dir = dirname(tmp = dir);
    if (tmp === dir) break;
  }
}

11
Jira_helper/node_modules/escalade/index.d.mts generated vendored Normal file
View File

@@ -0,0 +1,11 @@
type Promisable<T> = T | Promise<T>;
/**
 * Invoked for each ancestor directory with its absolute path and entry
 * names; return a truthy string to stop the walk, or false/void to
 * continue ascending.
 */
export type Callback = (
  directory: string,
  files: string[],
) => Promisable<string | false | void>;
/**
 * Ascend parent directories from `directory`, resolving to the
 * callback's match (resolved against the matching directory) or void.
 */
export default function (
  directory: string,
  callback: Callback,
): Promise<string | void>;

15
Jira_helper/node_modules/escalade/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,15 @@
type Promisable<T> = T | Promise<T>;
declare namespace escalade {
  /**
   * Invoked for each ancestor directory with its absolute path and entry
   * names; return a truthy string to stop the walk, or false/void to
   * continue ascending.
   */
  export type Callback = (
    directory: string,
    files: string[],
  ) => Promisable<string | false | void>;
}
/**
 * Ascend parent directories from `directory`, resolving to the
 * callback's match (resolved against the matching directory) or void.
 */
declare function escalade(
  directory: string,
  callback: escalade.Callback,
): Promise<string | void>;
export = escalade;

9
Jira_helper/node_modules/escalade/license generated vendored Normal file
View File

@@ -0,0 +1,9 @@
MIT License
Copyright (c) Luke Edwards <luke.edwards05@gmail.com> (lukeed.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

74
Jira_helper/node_modules/escalade/package.json generated vendored Normal file
View File

@@ -0,0 +1,74 @@
{
"name": "escalade",
"version": "3.2.0",
"repository": "lukeed/escalade",
"description": "A tiny (183B to 210B) and fast utility to ascend parent directories",
"module": "dist/index.mjs",
"main": "dist/index.js",
"types": "index.d.ts",
"license": "MIT",
"author": {
"name": "Luke Edwards",
"email": "luke.edwards05@gmail.com",
"url": "https://lukeed.com"
},
"exports": {
".": [
{
"import": {
"types": "./index.d.mts",
"default": "./dist/index.mjs"
},
"require": {
"types": "./index.d.ts",
"default": "./dist/index.js"
}
},
"./dist/index.js"
],
"./sync": [
{
"import": {
"types": "./sync/index.d.mts",
"default": "./sync/index.mjs"
},
"require": {
"types": "./sync/index.d.ts",
"default": "./sync/index.js"
}
},
"./sync/index.js"
]
},
"files": [
"*.d.mts",
"*.d.ts",
"dist",
"sync"
],
"modes": {
"sync": "src/sync.js",
"default": "src/async.js"
},
"engines": {
"node": ">=6"
},
"scripts": {
"build": "bundt",
"pretest": "npm run build",
"test": "uvu -r esm test -i fixtures"
},
"keywords": [
"find",
"parent",
"parents",
"directory",
"search",
"walk"
],
"devDependencies": {
"bundt": "1.1.1",
"esm": "3.2.25",
"uvu": "0.3.3"
}
}

211
Jira_helper/node_modules/escalade/readme.md generated vendored Normal file
View File

@@ -0,0 +1,211 @@
# escalade [![CI](https://github.com/lukeed/escalade/workflows/CI/badge.svg)](https://github.com/lukeed/escalade/actions) [![licenses](https://licenses.dev/b/npm/escalade)](https://licenses.dev/npm/escalade) [![codecov](https://badgen.now.sh/codecov/c/github/lukeed/escalade)](https://codecov.io/gh/lukeed/escalade)
> A tiny (183B to 210B) and [fast](#benchmarks) utility to ascend parent directories
With [escalade](https://en.wikipedia.org/wiki/Escalade), you can scale parent directories until you've found what you're looking for.<br>Given an input file or directory, `escalade` will continue executing your callback function until either:
1) the callback returns a truthy value
2) `escalade` has reached the system root directory (eg, `/`)
> **Important:**<br>Please note that `escalade` only deals with direct ancestry — it will not dive into parents' sibling directories.
---
**Notice:** As of v3.1.0, `escalade` now includes [Deno support](http://deno.land/x/escalade)! Please see [Deno Usage](#deno) below.
---
## Install
```
$ npm install --save escalade
```
## Modes
There are two "versions" of `escalade` available:
#### "async"
> **Node.js:** >= 8.x<br>
> **Size (gzip):** 210 bytes<br>
> **Availability:** [CommonJS](https://unpkg.com/escalade/dist/index.js), [ES Module](https://unpkg.com/escalade/dist/index.mjs)
This is the primary/default mode. It makes use of `async`/`await` and [`util.promisify`](https://nodejs.org/api/util.html#util_util_promisify_original).
#### "sync"
> **Node.js:** >= 6.x<br>
> **Size (gzip):** 183 bytes<br>
> **Availability:** [CommonJS](https://unpkg.com/escalade/sync/index.js), [ES Module](https://unpkg.com/escalade/sync/index.mjs)
This is the opt-in mode, ideal for scenarios where `async` usage cannot be supported.
## Usage
***Example Structure***
```
/Users/lukeed
└── oss
├── license
└── escalade
├── package.json
└── test
└── fixtures
├── index.js
└── foobar
└── demo.js
```
***Example Usage***
```js
//~> demo.js
import { join } from 'path';
import escalade from 'escalade';
const input = join(__dirname, 'demo.js');
// or: const input = __dirname;
const pkg = await escalade(input, (dir, names) => {
console.log('~> dir:', dir);
console.log('~> names:', names);
console.log('---');
if (names.includes('package.json')) {
// will be resolved into absolute
return 'package.json';
}
});
//~> dir: /Users/lukeed/oss/escalade/test/fixtures/foobar
//~> names: ['demo.js']
//---
//~> dir: /Users/lukeed/oss/escalade/test/fixtures
//~> names: ['index.js', 'foobar']
//---
//~> dir: /Users/lukeed/oss/escalade/test
//~> names: ['fixtures']
//---
//~> dir: /Users/lukeed/oss/escalade
//~> names: ['package.json', 'test']
//---
console.log(pkg);
//=> /Users/lukeed/oss/escalade/package.json
// Now search for "missing123.txt"
// (Assume it doesn't exist anywhere!)
const missing = await escalade(input, (dir, names) => {
console.log('~> dir:', dir);
return names.includes('missing123.txt') && 'missing123.txt';
});
//~> dir: /Users/lukeed/oss/escalade/test/fixtures/foobar
//~> dir: /Users/lukeed/oss/escalade/test/fixtures
//~> dir: /Users/lukeed/oss/escalade/test
//~> dir: /Users/lukeed/oss/escalade
//~> dir: /Users/lukeed/oss
//~> dir: /Users/lukeed
//~> dir: /Users
//~> dir: /
console.log(missing);
//=> undefined
```
> **Note:** To run the above example with "sync" mode, import from `escalade/sync` and remove the `await` keyword.
## API
### escalade(input, callback)
Returns: `string|void` or `Promise<string|void>`
When your `callback` locates a file, `escalade` will resolve/return with an absolute path.<br>
If your `callback` was never satisfied, then `escalade` will resolve/return with nothing (undefined).
> **Important:**<br>The `sync` and `async` versions share the same API.<br>The **only** difference is that `sync` is not Promise-based.
#### input
Type: `string`
The path from which to start ascending.
This may be a file or a directory path.<br>However, when `input` is a file, `escalade` will begin with its parent directory.
> **Important:** Unless given an absolute path, `input` will be resolved from `process.cwd()` location.
#### callback
Type: `Function`
The callback to execute for each ancestry level. It always is given two arguments:
1) `dir` - an absolute path of the current parent directory
2) `names` - a list (`string[]`) of contents _relative to_ the `dir` parent
> **Note:** The `names` list can contain names of files _and_ directories.
When your callback returns a _falsey_ value, then `escalade` will continue with `dir`'s parent directory, re-invoking your callback with new argument values.
When your callback returns a string, then `escalade` stops iteration immediately.<br>
If the string is an absolute path, then it's left as is. Otherwise, the string is resolved into an absolute path _from_ the `dir` that housed the satisfying condition.
> **Important:** Your `callback` can be a `Promise/AsyncFunction` when using the "async" version of `escalade`.
## Benchmarks
> Running on Node.js v10.13.0
```
# Load Time
find-up 3.891ms
escalade 0.485ms
escalade/sync 0.309ms
# Levels: 6 (target = "foo.txt"):
find-up x 24,856 ops/sec ±6.46% (55 runs sampled)
escalade x 73,084 ops/sec ±4.23% (73 runs sampled)
find-up.sync x 3,663 ops/sec ±1.12% (83 runs sampled)
escalade/sync x 9,360 ops/sec ±0.62% (88 runs sampled)
# Levels: 12 (target = "package.json"):
find-up x 29,300 ops/sec ±10.68% (70 runs sampled)
escalade x 73,685 ops/sec ± 5.66% (66 runs sampled)
find-up.sync x 1,707 ops/sec ± 0.58% (91 runs sampled)
escalade/sync x 4,667 ops/sec ± 0.68% (94 runs sampled)
# Levels: 18 (target = "missing123.txt"):
find-up x 21,818 ops/sec ±17.37% (14 runs sampled)
escalade x 67,101 ops/sec ±21.60% (20 runs sampled)
find-up.sync x 1,037 ops/sec ± 2.86% (88 runs sampled)
escalade/sync x 1,248 ops/sec ± 0.50% (93 runs sampled)
```
## Deno
As of v3.1.0, `escalade` is available on the Deno registry.
Please note that the [API](#api) is identical and that there are still [two modes](#modes) from which to choose:
```ts
// Choose "async" mode
import escalade from 'https://deno.land/x/escalade/async.ts';
// Choose "sync" mode
import escalade from 'https://deno.land/x/escalade/sync.ts';
```
> **Important:** The `allow-read` permission is required!
## Related
- [premove](https://github.com/lukeed/premove) - A tiny (247B) utility to remove items recursively
- [totalist](https://github.com/lukeed/totalist) - A tiny (195B to 224B) utility to recursively list all (total) files in a directory
- [mk-dirs](https://github.com/lukeed/mk-dirs) - A tiny (420B) utility to make a directory and its parents, recursively
## License
MIT © [Luke Edwards](https://lukeed.com)

9
Jira_helper/node_modules/escalade/sync/index.d.mts generated vendored Normal file
View File

@@ -0,0 +1,9 @@
export type Callback = (
directory: string,
files: string[],
) => string | false | void;
export default function (
directory: string,
callback: Callback,
): string | void;

13
Jira_helper/node_modules/escalade/sync/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,13 @@
declare namespace escalade {
export type Callback = (
directory: string,
files: string[],
) => string | false | void;
}
declare function escalade(
directory: string,
callback: escalade.Callback,
): string | void;
export = escalade;

18
Jira_helper/node_modules/escalade/sync/index.js generated vendored Normal file
View File

@@ -0,0 +1,18 @@
const { dirname, resolve } = require('path');
const { readdirSync, statSync } = require('fs');
module.exports = function (start, callback) {
let dir = resolve('.', start);
let tmp, stats = statSync(dir);
if (!stats.isDirectory()) {
dir = dirname(dir);
}
while (true) {
tmp = callback(dir, readdirSync(dir));
if (tmp) return resolve(dir, tmp);
dir = dirname(tmp = dir);
if (tmp === dir) break;
}
}

18
Jira_helper/node_modules/escalade/sync/index.mjs generated vendored Normal file
View File

@@ -0,0 +1,18 @@
import { dirname, resolve } from 'path';
import { readdirSync, statSync } from 'fs';

/**
 * Walk upward from `start` (a file or directory path), invoking
 * `callback(dir, names)` for each ancestor directory until the callback
 * returns a truthy value or the filesystem root is reached.
 *
 * Fix: the default export is now a *named* function so stack traces and
 * profilers show `escalade` instead of an anonymous frame. The default
 * export itself is unchanged, so all callers keep working.
 *
 * @param {string} start Path to begin from; resolved against process.cwd().
 * @param {(dir: string, names: string[]) => string|false|void} callback
 *   Receives the current directory and its entry names.
 * @returns {string|void} The callback's return value resolved into an
 *   absolute path, or undefined if the callback was never satisfied.
 */
export default function escalade(start, callback) {
	let dir = resolve('.', start);

	// When given a file, begin from its containing directory.
	if (!statSync(dir).isDirectory()) {
		dir = dirname(dir);
	}

	while (true) {
		const match = callback(dir, readdirSync(dir));
		if (match) return resolve(dir, match);

		const parent = dirname(dir);
		if (parent === dir) break; // reached the root; give up
		dir = parent;
	}
}

6
Jira_helper/node_modules/get-caller-file/LICENSE.md generated vendored Normal file
View File

@@ -0,0 +1,6 @@
ISC License (ISC)
Copyright 2018 Stefan Penner
Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

41
Jira_helper/node_modules/get-caller-file/README.md generated vendored Normal file
View File

@@ -0,0 +1,41 @@
# get-caller-file
[![Build Status](https://travis-ci.org/stefanpenner/get-caller-file.svg?branch=master)](https://travis-ci.org/stefanpenner/get-caller-file)
[![Build status](https://ci.appveyor.com/api/projects/status/ol2q94g1932cy14a/branch/master?svg=true)](https://ci.appveyor.com/project/embercli/get-caller-file/branch/master)
This is a utility, which allows a function to figure out from which file it was invoked. It does so by inspecting v8's stack trace at the time it is invoked.
Inspired by http://stackoverflow.com/questions/13227489
*note: this relies on Node/V8 specific APIs, as such other runtimes may not work*
## Installation
```bash
yarn add get-caller-file
```
## Usage
Given:
```js
// ./foo.js
const getCallerFile = require('get-caller-file');
module.exports = function() {
return getCallerFile(); // figures out who called it
};
```
```js
// index.js
const foo = require('./foo');
foo() // => /full/path/to/this/file/index.js
```
## Options:
* `getCallerFile(position = 2)`: where position is the stack frame whose fileName we want.

2
Jira_helper/node_modules/get-caller-file/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,2 @@
declare const _default: (position?: number) => any;
export = _default;

22
Jira_helper/node_modules/get-caller-file/index.js generated vendored Normal file
View File

@@ -0,0 +1,22 @@
"use strict";
// Call this function in a another function to find out the file from
// which that function was called from. (Inspects the v8 stack trace)
//
// Inspired by http://stackoverflow.com/questions/13227489
module.exports = function getCallerFile(position) {
if (position === void 0) { position = 2; }
if (position >= Error.stackTraceLimit) {
throw new TypeError('getCallerFile(position) requires position be less then Error.stackTraceLimit but position was: `' + position + '` and Error.stackTraceLimit was: `' + Error.stackTraceLimit + '`');
}
var oldPrepareStackTrace = Error.prepareStackTrace;
Error.prepareStackTrace = function (_, stack) { return stack; };
var stack = new Error().stack;
Error.prepareStackTrace = oldPrepareStackTrace;
if (stack !== null && typeof stack === 'object') {
// stack[0] holds this file
// stack[1] holds where this function was called
// stack[2] holds the file we're interested in
return stack[position] ? stack[position].getFileName() : undefined;
}
};
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":";AAAA,qEAAqE;AACrE,qEAAqE;AACrE,EAAE;AACF,0DAA0D;AAE1D,iBAAS,SAAS,aAAa,CAAC,QAAY;IAAZ,yBAAA,EAAA,YAAY;IAC1C,IAAI,QAAQ,IAAI,KAAK,CAAC,eAAe,EAAE;QACrC,MAAM,IAAI,SAAS,CAAC,kGAAkG,GAAG,QAAQ,GAAG,oCAAoC,GAAG,KAAK,CAAC,eAAe,GAAG,GAAG,CAAC,CAAC;KACzM;IAED,IAAM,oBAAoB,GAAG,KAAK,CAAC,iBAAiB,CAAC;IACrD,KAAK,CAAC,iBAAiB,GAAG,UAAC,CAAC,EAAE,KAAK,IAAM,OAAA,KAAK,EAAL,CAAK,CAAC;IAC/C,IAAM,KAAK,GAAG,IAAI,KAAK,EAAE,CAAC,KAAK,CAAC;IAChC,KAAK,CAAC,iBAAiB,GAAG,oBAAoB,CAAC;IAG/C,IAAI,KAAK,KAAK,IAAI,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC/C,2BAA2B;QAC3B,gDAAgD;QAChD,8CAA8C;QAC9C,OAAO,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAE,KAAK,CAAC,QAAQ,CAAS,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;KAC7E;AACH,CAAC,CAAC"}

42
Jira_helper/node_modules/get-caller-file/package.json generated vendored Normal file
View File

@@ -0,0 +1,42 @@
{
"name": "get-caller-file",
"version": "2.0.5",
"description": "",
"main": "index.js",
"directories": {
"test": "tests"
},
"files": [
"index.js",
"index.js.map",
"index.d.ts"
],
"scripts": {
"prepare": "tsc",
"test": "mocha test",
"test:debug": "mocha test"
},
"repository": {
"type": "git",
"url": "git+https://github.com/stefanpenner/get-caller-file.git"
},
"author": "Stefan Penner",
"license": "ISC",
"bugs": {
"url": "https://github.com/stefanpenner/get-caller-file/issues"
},
"homepage": "https://github.com/stefanpenner/get-caller-file#readme",
"devDependencies": {
"@types/chai": "^4.1.7",
"@types/ensure-posix-path": "^1.0.0",
"@types/mocha": "^5.2.6",
"@types/node": "^11.10.5",
"chai": "^4.1.2",
"ensure-posix-path": "^1.0.1",
"mocha": "^5.2.0",
"typescript": "^3.3.3333"
},
"engines": {
"node": "6.* || 8.* || >= 10.*"
}
}

View File

@@ -0,0 +1,60 @@
export type WidthType = 'fullwidth' | 'halfwidth' | 'wide' | 'narrow' | 'neutral' | 'ambiguous';
export type Options = {
/**
Whether to treat an `'ambiguous'` character as wide.
@default false
@example
```
import {eastAsianWidth} from 'get-east-asian-width';
const codePoint = '⛣'.codePointAt(0);
console.log(eastAsianWidth(codePoint));
//=> 1
console.log(eastAsianWidth(codePoint, {ambiguousAsWide: true}));
//=> 2
```
> Ambiguous characters behave like wide or narrow characters depending on the context (language tag, script identification, associated font, source of data, or explicit markup; all can provide the context). __If the context cannot be established reliably, they should be treated as narrow characters by default.__
> - http://www.unicode.org/reports/tr11/
*/
readonly ambiguousAsWide?: boolean;
};
/**
Returns the width as a number for the given code point.
@param codePoint - A Unicode code point.
@example
```
import {eastAsianWidth} from 'get-east-asian-width';
const codePoint = '字'.codePointAt(0);
console.log(eastAsianWidth(codePoint));
//=> 2
```
*/
export function eastAsianWidth(codePoint: number, options?: Options): 1 | 2;
/**
Returns the type of “East Asian Width” for the given code point.
@param codePoint - A Unicode code point.
@example
```
import {eastAsianWidthType} from 'get-east-asian-width';
const codePoint = '字'.codePointAt(0);
console.log(eastAsianWidthType(codePoint));
//=> 'wide'
```
*/
export function eastAsianWidthType(codePoint: number): WidthType;

30
Jira_helper/node_modules/get-east-asian-width/index.js generated vendored Normal file
View File

@@ -0,0 +1,30 @@
import {getCategory, isAmbiguous, isFullWidth, isWide} from './lookup.js';

// Reject anything that is not a safe-integer code point.
const assertCodePoint = (codePoint) => {
	if (!Number.isSafeInteger(codePoint)) {
		throw new TypeError(`Expected a code point, got \`${typeof codePoint}\`.`);
	}
};

/**
 * Returns the East Asian Width category name for `codePoint`.
 * @throws {TypeError} When `codePoint` is not a safe integer.
 */
export function eastAsianWidthType(codePoint) {
	assertCodePoint(codePoint);
	return getCategory(codePoint);
}

/**
 * Returns the number of columns (1 or 2) occupied by `codePoint`.
 * Ambiguous-width characters count as narrow (1) unless
 * `ambiguousAsWide` is set.
 * @throws {TypeError} When `codePoint` is not a safe integer.
 */
export function eastAsianWidth(codePoint, {ambiguousAsWide = false} = {}) {
	assertCodePoint(codePoint);

	const isTwoColumns = isFullWidth(codePoint)
		|| isWide(codePoint)
		|| (ambiguousAsWide && isAmbiguous(codePoint));

	return isTwoColumns ? 2 : 1;
}

// Private exports for https://github.com/sindresorhus/is-fullwidth-code-point
export {isFullWidth as _isFullWidth, isWide as _isWide} from './lookup.js';

View File

@@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

403
Jira_helper/node_modules/get-east-asian-width/lookup.js generated vendored Normal file
View File

@@ -0,0 +1,403 @@
// Generated code.
// True when code point `x` is in East Asian Width category A ("ambiguous"):
// characters that render narrow or wide depending on context (UAX #11).
// Generated lookup table (see "Generated code." file header) — do not edit
// by hand; presumably rebuilt via the package's build script per Unicode
// release (TODO confirm against scripts/build.js).
function isAmbiguous(x) {
return x === 0xA1
|| x === 0xA4
|| x === 0xA7
|| x === 0xA8
|| x === 0xAA
|| x === 0xAD
|| x === 0xAE
|| x >= 0xB0 && x <= 0xB4
|| x >= 0xB6 && x <= 0xBA
|| x >= 0xBC && x <= 0xBF
|| x === 0xC6
|| x === 0xD0
|| x === 0xD7
|| x === 0xD8
|| x >= 0xDE && x <= 0xE1
|| x === 0xE6
|| x >= 0xE8 && x <= 0xEA
|| x === 0xEC
|| x === 0xED
|| x === 0xF0
|| x === 0xF2
|| x === 0xF3
|| x >= 0xF7 && x <= 0xFA
|| x === 0xFC
|| x === 0xFE
|| x === 0x101
|| x === 0x111
|| x === 0x113
|| x === 0x11B
|| x === 0x126
|| x === 0x127
|| x === 0x12B
|| x >= 0x131 && x <= 0x133
|| x === 0x138
|| x >= 0x13F && x <= 0x142
|| x === 0x144
|| x >= 0x148 && x <= 0x14B
|| x === 0x14D
|| x === 0x152
|| x === 0x153
|| x === 0x166
|| x === 0x167
|| x === 0x16B
|| x === 0x1CE
|| x === 0x1D0
|| x === 0x1D2
|| x === 0x1D4
|| x === 0x1D6
|| x === 0x1D8
|| x === 0x1DA
|| x === 0x1DC
|| x === 0x251
|| x === 0x261
|| x === 0x2C4
|| x === 0x2C7
|| x >= 0x2C9 && x <= 0x2CB
|| x === 0x2CD
|| x === 0x2D0
|| x >= 0x2D8 && x <= 0x2DB
|| x === 0x2DD
|| x === 0x2DF
|| x >= 0x300 && x <= 0x36F
|| x >= 0x391 && x <= 0x3A1
|| x >= 0x3A3 && x <= 0x3A9
|| x >= 0x3B1 && x <= 0x3C1
|| x >= 0x3C3 && x <= 0x3C9
|| x === 0x401
|| x >= 0x410 && x <= 0x44F
|| x === 0x451
|| x === 0x2010
|| x >= 0x2013 && x <= 0x2016
|| x === 0x2018
|| x === 0x2019
|| x === 0x201C
|| x === 0x201D
|| x >= 0x2020 && x <= 0x2022
|| x >= 0x2024 && x <= 0x2027
|| x === 0x2030
|| x === 0x2032
|| x === 0x2033
|| x === 0x2035
|| x === 0x203B
|| x === 0x203E
|| x === 0x2074
|| x === 0x207F
|| x >= 0x2081 && x <= 0x2084
|| x === 0x20AC
|| x === 0x2103
|| x === 0x2105
|| x === 0x2109
|| x === 0x2113
|| x === 0x2116
|| x === 0x2121
|| x === 0x2122
|| x === 0x2126
|| x === 0x212B
|| x === 0x2153
|| x === 0x2154
|| x >= 0x215B && x <= 0x215E
|| x >= 0x2160 && x <= 0x216B
|| x >= 0x2170 && x <= 0x2179
|| x === 0x2189
|| x >= 0x2190 && x <= 0x2199
|| x === 0x21B8
|| x === 0x21B9
|| x === 0x21D2
|| x === 0x21D4
|| x === 0x21E7
|| x === 0x2200
|| x === 0x2202
|| x === 0x2203
|| x === 0x2207
|| x === 0x2208
|| x === 0x220B
|| x === 0x220F
|| x === 0x2211
|| x === 0x2215
|| x === 0x221A
|| x >= 0x221D && x <= 0x2220
|| x === 0x2223
|| x === 0x2225
|| x >= 0x2227 && x <= 0x222C
|| x === 0x222E
|| x >= 0x2234 && x <= 0x2237
|| x === 0x223C
|| x === 0x223D
|| x === 0x2248
|| x === 0x224C
|| x === 0x2252
|| x === 0x2260
|| x === 0x2261
|| x >= 0x2264 && x <= 0x2267
|| x === 0x226A
|| x === 0x226B
|| x === 0x226E
|| x === 0x226F
|| x === 0x2282
|| x === 0x2283
|| x === 0x2286
|| x === 0x2287
|| x === 0x2295
|| x === 0x2299
|| x === 0x22A5
|| x === 0x22BF
|| x === 0x2312
|| x >= 0x2460 && x <= 0x24E9
|| x >= 0x24EB && x <= 0x254B
|| x >= 0x2550 && x <= 0x2573
|| x >= 0x2580 && x <= 0x258F
|| x >= 0x2592 && x <= 0x2595
|| x === 0x25A0
|| x === 0x25A1
|| x >= 0x25A3 && x <= 0x25A9
|| x === 0x25B2
|| x === 0x25B3
|| x === 0x25B6
|| x === 0x25B7
|| x === 0x25BC
|| x === 0x25BD
|| x === 0x25C0
|| x === 0x25C1
|| x >= 0x25C6 && x <= 0x25C8
|| x === 0x25CB
|| x >= 0x25CE && x <= 0x25D1
|| x >= 0x25E2 && x <= 0x25E5
|| x === 0x25EF
|| x === 0x2605
|| x === 0x2606
|| x === 0x2609
|| x === 0x260E
|| x === 0x260F
|| x === 0x261C
|| x === 0x261E
|| x === 0x2640
|| x === 0x2642
|| x === 0x2660
|| x === 0x2661
|| x >= 0x2663 && x <= 0x2665
|| x >= 0x2667 && x <= 0x266A
|| x === 0x266C
|| x === 0x266D
|| x === 0x266F
|| x === 0x269E
|| x === 0x269F
|| x === 0x26BF
|| x >= 0x26C6 && x <= 0x26CD
|| x >= 0x26CF && x <= 0x26D3
|| x >= 0x26D5 && x <= 0x26E1
|| x === 0x26E3
|| x === 0x26E8
|| x === 0x26E9
|| x >= 0x26EB && x <= 0x26F1
|| x === 0x26F4
|| x >= 0x26F6 && x <= 0x26F9
|| x === 0x26FB
|| x === 0x26FC
|| x === 0x26FE
|| x === 0x26FF
|| x === 0x273D
|| x >= 0x2776 && x <= 0x277F
|| x >= 0x2B56 && x <= 0x2B59
|| x >= 0x3248 && x <= 0x324F
|| x >= 0xE000 && x <= 0xF8FF
|| x >= 0xFE00 && x <= 0xFE0F
|| x === 0xFFFD
|| x >= 0x1F100 && x <= 0x1F10A
|| x >= 0x1F110 && x <= 0x1F12D
|| x >= 0x1F130 && x <= 0x1F169
|| x >= 0x1F170 && x <= 0x1F18D
|| x === 0x1F18F
|| x === 0x1F190
|| x >= 0x1F19B && x <= 0x1F1AC
|| x >= 0xE0100 && x <= 0xE01EF
|| x >= 0xF0000 && x <= 0xFFFFD
|| x >= 0x100000 && x <= 0x10FFFD;
}
// True when code point `x` is in East Asian Width category F ("fullwidth"):
// the ideographic space plus the fullwidth ASCII and currency-sign variants.
function isFullWidth(x) {
	if (x === 0x3000) return true;
	if (x >= 0xFF01 && x <= 0xFF60) return true;
	return x >= 0xFFE0 && x <= 0xFFE6;
}
// True when code point `x` is in East Asian Width category W ("wide"):
// CJK ideographs, Hangul, kana, wide symbols/emoji, etc. (UAX #11).
// Generated lookup table (see "Generated code." file header) — do not edit
// by hand; presumably rebuilt via the package's build script per Unicode
// release (TODO confirm against scripts/build.js).
function isWide(x) {
return x >= 0x1100 && x <= 0x115F
|| x === 0x231A
|| x === 0x231B
|| x === 0x2329
|| x === 0x232A
|| x >= 0x23E9 && x <= 0x23EC
|| x === 0x23F0
|| x === 0x23F3
|| x === 0x25FD
|| x === 0x25FE
|| x === 0x2614
|| x === 0x2615
|| x >= 0x2630 && x <= 0x2637
|| x >= 0x2648 && x <= 0x2653
|| x === 0x267F
|| x >= 0x268A && x <= 0x268F
|| x === 0x2693
|| x === 0x26A1
|| x === 0x26AA
|| x === 0x26AB
|| x === 0x26BD
|| x === 0x26BE
|| x === 0x26C4
|| x === 0x26C5
|| x === 0x26CE
|| x === 0x26D4
|| x === 0x26EA
|| x === 0x26F2
|| x === 0x26F3
|| x === 0x26F5
|| x === 0x26FA
|| x === 0x26FD
|| x === 0x2705
|| x === 0x270A
|| x === 0x270B
|| x === 0x2728
|| x === 0x274C
|| x === 0x274E
|| x >= 0x2753 && x <= 0x2755
|| x === 0x2757
|| x >= 0x2795 && x <= 0x2797
|| x === 0x27B0
|| x === 0x27BF
|| x === 0x2B1B
|| x === 0x2B1C
|| x === 0x2B50
|| x === 0x2B55
|| x >= 0x2E80 && x <= 0x2E99
|| x >= 0x2E9B && x <= 0x2EF3
|| x >= 0x2F00 && x <= 0x2FD5
|| x >= 0x2FF0 && x <= 0x2FFF
|| x >= 0x3001 && x <= 0x303E
|| x >= 0x3041 && x <= 0x3096
|| x >= 0x3099 && x <= 0x30FF
|| x >= 0x3105 && x <= 0x312F
|| x >= 0x3131 && x <= 0x318E
|| x >= 0x3190 && x <= 0x31E5
|| x >= 0x31EF && x <= 0x321E
|| x >= 0x3220 && x <= 0x3247
|| x >= 0x3250 && x <= 0xA48C
|| x >= 0xA490 && x <= 0xA4C6
|| x >= 0xA960 && x <= 0xA97C
|| x >= 0xAC00 && x <= 0xD7A3
|| x >= 0xF900 && x <= 0xFAFF
|| x >= 0xFE10 && x <= 0xFE19
|| x >= 0xFE30 && x <= 0xFE52
|| x >= 0xFE54 && x <= 0xFE66
|| x >= 0xFE68 && x <= 0xFE6B
|| x >= 0x16FE0 && x <= 0x16FE4
|| x >= 0x16FF0 && x <= 0x16FF6
|| x >= 0x17000 && x <= 0x18CD5
|| x >= 0x18CFF && x <= 0x18D1E
|| x >= 0x18D80 && x <= 0x18DF2
|| x >= 0x1AFF0 && x <= 0x1AFF3
|| x >= 0x1AFF5 && x <= 0x1AFFB
|| x === 0x1AFFD
|| x === 0x1AFFE
|| x >= 0x1B000 && x <= 0x1B122
|| x === 0x1B132
|| x >= 0x1B150 && x <= 0x1B152
|| x === 0x1B155
|| x >= 0x1B164 && x <= 0x1B167
|| x >= 0x1B170 && x <= 0x1B2FB
|| x >= 0x1D300 && x <= 0x1D356
|| x >= 0x1D360 && x <= 0x1D376
|| x === 0x1F004
|| x === 0x1F0CF
|| x === 0x1F18E
|| x >= 0x1F191 && x <= 0x1F19A
|| x >= 0x1F200 && x <= 0x1F202
|| x >= 0x1F210 && x <= 0x1F23B
|| x >= 0x1F240 && x <= 0x1F248
|| x === 0x1F250
|| x === 0x1F251
|| x >= 0x1F260 && x <= 0x1F265
|| x >= 0x1F300 && x <= 0x1F320
|| x >= 0x1F32D && x <= 0x1F335
|| x >= 0x1F337 && x <= 0x1F37C
|| x >= 0x1F37E && x <= 0x1F393
|| x >= 0x1F3A0 && x <= 0x1F3CA
|| x >= 0x1F3CF && x <= 0x1F3D3
|| x >= 0x1F3E0 && x <= 0x1F3F0
|| x === 0x1F3F4
|| x >= 0x1F3F8 && x <= 0x1F43E
|| x === 0x1F440
|| x >= 0x1F442 && x <= 0x1F4FC
|| x >= 0x1F4FF && x <= 0x1F53D
|| x >= 0x1F54B && x <= 0x1F54E
|| x >= 0x1F550 && x <= 0x1F567
|| x === 0x1F57A
|| x === 0x1F595
|| x === 0x1F596
|| x === 0x1F5A4
|| x >= 0x1F5FB && x <= 0x1F64F
|| x >= 0x1F680 && x <= 0x1F6C5
|| x === 0x1F6CC
|| x >= 0x1F6D0 && x <= 0x1F6D2
|| x >= 0x1F6D5 && x <= 0x1F6D8
|| x >= 0x1F6DC && x <= 0x1F6DF
|| x === 0x1F6EB
|| x === 0x1F6EC
|| x >= 0x1F6F4 && x <= 0x1F6FC
|| x >= 0x1F7E0 && x <= 0x1F7EB
|| x === 0x1F7F0
|| x >= 0x1F90C && x <= 0x1F93A
|| x >= 0x1F93C && x <= 0x1F945
|| x >= 0x1F947 && x <= 0x1F9FF
|| x >= 0x1FA70 && x <= 0x1FA7C
|| x >= 0x1FA80 && x <= 0x1FA8A
|| x >= 0x1FA8E && x <= 0x1FAC6
|| x === 0x1FAC8
|| x >= 0x1FACD && x <= 0x1FADC
|| x >= 0x1FADF && x <= 0x1FAEA
|| x >= 0x1FAEF && x <= 0x1FAF8
|| x >= 0x20000 && x <= 0x2FFFD
|| x >= 0x30000 && x <= 0x3FFFD;
}
// Map code point `x` to its East Asian Width category name.
// Order matters and mirrors the original: ambiguous and fullwidth take
// precedence, then the explicit halfwidth and narrow ranges, then wide;
// anything left over is neutral.
function getCategory(x) {
	if (isAmbiguous(x)) {
		return 'ambiguous';
	}

	if (isFullWidth(x)) {
		return 'fullwidth';
	}

	const halfwidth =
		x === 0x20A9
		|| (x >= 0xFF61 && x <= 0xFFBE)
		|| (x >= 0xFFC2 && x <= 0xFFC7)
		|| (x >= 0xFFCA && x <= 0xFFCF)
		|| (x >= 0xFFD2 && x <= 0xFFD7)
		|| (x >= 0xFFDA && x <= 0xFFDC)
		|| (x >= 0xFFE8 && x <= 0xFFEE);
	if (halfwidth) {
		return 'halfwidth';
	}

	const narrow =
		(x >= 0x20 && x <= 0x7E)
		|| x === 0xA2
		|| x === 0xA3
		|| x === 0xA5
		|| x === 0xA6
		|| x === 0xAC
		|| x === 0xAF
		|| (x >= 0x27E6 && x <= 0x27ED)
		|| x === 0x2985
		|| x === 0x2986;
	if (narrow) {
		return 'narrow';
	}

	return isWide(x) ? 'wide' : 'neutral';
}
export {isAmbiguous, isFullWidth, isWide, getCategory};

View File

@@ -0,0 +1,70 @@
{
"name": "get-east-asian-width",
"version": "1.4.0",
"description": "Determine the East Asian Width of a Unicode character",
"license": "MIT",
"repository": "sindresorhus/get-east-asian-width",
"funding": "https://github.com/sponsors/sindresorhus",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": {
"types": "./index.d.ts",
"default": "./index.js"
},
"sideEffects": false,
"engines": {
"node": ">=18"
},
"scripts": {
"test": "xo && ava && tsc index.d.ts",
"build": "node scripts/build.js",
"prepublish": "npm run build"
},
"files": [
"index.js",
"index.d.ts",
"lookup.js"
],
"keywords": [
"unicode",
"east-asian-width",
"eastasianwidth",
"character",
"string",
"width",
"text",
"layout",
"alignment",
"fullwidth",
"halfwidth",
"ambiguous",
"narrow",
"wide",
"neutral",
"typography",
"japanese",
"chinese",
"korean",
"codepoint",
"text-processing",
"i18n",
"l10n"
],
"devDependencies": {
"ava": "^5.3.1",
"indent-string": "^5.0.0",
"outdent": "^0.8.0",
"simplify-ranges": "^0.1.0",
"typescript": "^5.2.2",
"xo": "^0.56.0"
},
"xo": {
"ignores": [
"lookup.js"
]
}
}

View File

@@ -0,0 +1,65 @@
# get-east-asian-width
> Determine the [East Asian Width](https://unicode.org/reports/tr11/) of a Unicode character
> East Asian Width categorizes Unicode characters based on their occupied space in East Asian typography, which helps in text layout and alignment, particularly in languages like Japanese, Chinese, and Korean.
Unlike other similar packages, this package uses the latest Unicode data (which changes each year).
## Install
```sh
npm install get-east-asian-width
```
## Usage
```js
import {eastAsianWidth, eastAsianWidthType} from 'get-east-asian-width';
const codePoint = '字'.codePointAt(0);
console.log(eastAsianWidth(codePoint));
//=> 2
console.log(eastAsianWidthType(codePoint));
//=> 'wide'
```
## `eastAsianWidth(codePoint: number, options?: object): 1 | 2`
Returns the width as a number for the given code point.
### options
Type: `object`
#### ambiguousAsWide
Type: `boolean`\
Default: `false`
Whether to treat an `'ambiguous'` character as wide.
```js
import {eastAsianWidth} from 'get-east-asian-width';
const codePoint = '⛣'.codePointAt(0);
console.log(eastAsianWidth(codePoint));
//=> 1
console.log(eastAsianWidth(codePoint, {ambiguousAsWide: true}));
//=> 2
```
> Ambiguous characters behave like wide or narrow characters depending on the context (language tag, script identification, associated font, source of data, or explicit markup; all can provide the context). **If the context cannot be established reliably, they should be treated as narrow characters by default.**
> - http://www.unicode.org/reports/tr11/
## `eastAsianWidthType(codePoint: number): 'fullwidth' | 'halfwidth' | 'wide' | 'narrow' | 'neutral' | 'ambiguous'`
Returns the type of “East Asian Width” for the given code point.
## Related
- [string-width](https://github.com/sindresorhus/string-width) - Get the visual width of a string

39
Jira_helper/node_modules/string-width/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,39 @@
export type Options = {
/**
Count [ambiguous width characters](https://www.unicode.org/reports/tr11/#Ambiguous) as having narrow width (count of 1) instead of wide width (count of 2).
@default true
> Ambiguous characters behave like wide or narrow characters depending on the context (language tag, script identification, associated font, source of data, or explicit markup; all can provide the context). __If the context cannot be established reliably, they should be treated as narrow characters by default.__
> - http://www.unicode.org/reports/tr11/
*/
readonly ambiguousIsNarrow?: boolean;
/**
Whether [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) should be counted.
@default false
*/
readonly countAnsiEscapeCodes?: boolean;
};
/**
Get the visual width of a string - the number of columns required to display it.
Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) are stripped and don't affect the width.
@example
```
import stringWidth from 'string-width';
stringWidth('a');
//=> 1
stringWidth('古');
//=> 2
stringWidth('\u001B[1m古\u001B[22m');
//=> 2
```
*/
export default function stringWidth(string: string, options?: Options): number;

82
Jira_helper/node_modules/string-width/index.js generated vendored Normal file
View File

@@ -0,0 +1,82 @@
import stripAnsi from 'strip-ansi';
import {eastAsianWidth} from 'get-east-asian-width';
import emojiRegex from 'emoji-regex';

const segmenter = new Intl.Segmenter();

const defaultIgnorableCodePointRegex = /^\p{Default_Ignorable_Code_Point}$/u;

/**
 * Get the visual width of a string — the number of columns required to
 * display it. ANSI escape codes are stripped first unless
 * `countAnsiEscapeCodes` is set; ambiguous-width characters count as
 * narrow unless `ambiguousIsNarrow` is set to false.
 *
 * @param {string} string Text to measure; non-strings yield 0.
 * @param {{ambiguousIsNarrow?: boolean, countAnsiEscapeCodes?: boolean}} [options]
 * @returns {number} Number of display columns.
 */
export default function stringWidth(string, options = {}) {
	if (typeof string !== 'string' || string.length === 0) {
		return 0;
	}

	const {
		ambiguousIsNarrow = true,
		countAnsiEscapeCodes = false,
	} = options;

	const text = countAnsiEscapeCodes ? string : stripAnsi(string);
	if (text.length === 0) {
		return 0;
	}

	const eastAsianWidthOptions = {ambiguousAsWide: !ambiguousIsNarrow};
	let width = 0;

	// Measure one grapheme segment at a time so combining sequences and
	// emoji clusters are handled per their leading code point.
	for (const {segment} of segmenter.segment(text)) {
		const codePoint = segment.codePointAt(0);

		if (isZeroAdvance(codePoint, segment)) {
			continue;
		}

		// TODO: Use `/\p{RGI_Emoji}/v` when targeting Node.js 20.
		if (emojiRegex().test(segment)) {
			width += 2;
			continue;
		}

		width += eastAsianWidth(codePoint, eastAsianWidthOptions);
	}

	return width;
}

// True for segments that occupy no display column: controls, zero-width
// characters, combining marks, lone surrogates, variation selectors, and
// anything carrying the Default_Ignorable_Code_Point property.
function isZeroAdvance(codePoint, character) {
	// Control characters.
	if (codePoint <= 0x1F || (codePoint >= 0x7F && codePoint <= 0x9F)) {
		return true;
	}

	// Zero-width space, non-joiner, joiner, left-to-right mark,
	// right-to-left mark, and the zero-width no-break space.
	if ((codePoint >= 0x20_0B && codePoint <= 0x20_0F) || codePoint === 0xFE_FF) {
		return true;
	}

	// Combining marks: diacritical marks, extended, supplement,
	// marks for symbols, and half marks.
	if (
		(codePoint >= 0x3_00 && codePoint <= 0x3_6F)
		|| (codePoint >= 0x1A_B0 && codePoint <= 0x1A_FF)
		|| (codePoint >= 0x1D_C0 && codePoint <= 0x1D_FF)
		|| (codePoint >= 0x20_D0 && codePoint <= 0x20_FF)
		|| (codePoint >= 0xFE_20 && codePoint <= 0xFE_2F)
	) {
		return true;
	}

	// Surrogate halves.
	if (codePoint >= 0xD8_00 && codePoint <= 0xDF_FF) {
		return true;
	}

	// Variation selectors.
	if (codePoint >= 0xFE_00 && codePoint <= 0xFE_0F) {
		return true;
	}

	// Covers some of the cases above, but the cheap range checks are kept
	// first for performance.
	return defaultIgnorableCodePointRegex.test(character);
}

9
Jira_helper/node_modules/string-width/license generated vendored Normal file
View File

@@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

64
Jira_helper/node_modules/string-width/package.json generated vendored Normal file
View File

@@ -0,0 +1,64 @@
{
"name": "string-width",
"version": "7.2.0",
"description": "Get the visual width of a string - the number of columns required to display it",
"license": "MIT",
"repository": "sindresorhus/string-width",
"funding": "https://github.com/sponsors/sindresorhus",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": {
"types": "./index.d.ts",
"default": "./index.js"
},
"sideEffects": false,
"engines": {
"node": ">=18"
},
"scripts": {
"test": "xo && ava && tsd"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"string",
"character",
"unicode",
"width",
"visual",
"column",
"columns",
"fullwidth",
"full-width",
"full",
"ansi",
"escape",
"codes",
"cli",
"command-line",
"terminal",
"console",
"cjk",
"chinese",
"japanese",
"korean",
"fixed-width",
"east-asian-width"
],
"dependencies": {
"emoji-regex": "^10.3.0",
"get-east-asian-width": "^1.0.0",
"strip-ansi": "^7.1.0"
},
"devDependencies": {
"ava": "^5.3.1",
"tsd": "^0.29.0",
"xo": "^0.56.0"
}
}

66
Jira_helper/node_modules/string-width/readme.md generated vendored Normal file
View File

@@ -0,0 +1,66 @@
# string-width
> Get the visual width of a string - the number of columns required to display it
Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) are stripped and don't affect the width.
Useful to be able to measure the actual width of command-line output.
## Install
```sh
npm install string-width
```
## Usage
```js
import stringWidth from 'string-width';
stringWidth('a');
//=> 1
stringWidth('古');
//=> 2
stringWidth('\u001B[1m古\u001B[22m');
//=> 2
```
## API
### stringWidth(string, options?)
#### string
Type: `string`
The string to be counted.
#### options
Type: `object`
##### ambiguousIsNarrow
Type: `boolean`\
Default: `true`
Count [ambiguous width characters](https://www.unicode.org/reports/tr11/#Ambiguous) as having narrow width (count of 1) instead of wide width (count of 2).
> Ambiguous characters behave like wide or narrow characters depending on the context (language tag, script identification, associated font, source of data, or explicit markup; all can provide the context). **If the context cannot be established reliably, they should be treated as narrow characters by default.**
> - http://www.unicode.org/reports/tr11/
##### countAnsiEscapeCodes
Type: `boolean`\
Default: `false`
Whether [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) should be counted.
## Related
- [string-width-cli](https://github.com/sindresorhus/string-width-cli) - CLI for this module
- [string-length](https://github.com/sindresorhus/string-length) - Get the real length of a string
- [widest-line](https://github.com/sindresorhus/widest-line) - Get the visual width of the widest line in a string
- [get-east-asian-width](https://github.com/sindresorhus/get-east-asian-width) - Determine the East Asian Width of a Unicode character

15
Jira_helper/node_modules/strip-ansi/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,15 @@
/**
Strip [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) from a string.

@param string - The string to strip ANSI escape codes from.
@returns The input string with all ANSI escape codes removed.

@example
```
import stripAnsi from 'strip-ansi';

stripAnsi('\u001B[4mUnicorn\u001B[0m');
//=> 'Unicorn'

stripAnsi('\u001B]8;;https://github.com\u0007Click\u001B]8;;\u0007');
//=> 'Click'
```
*/
export default function stripAnsi(string: string): string;

14
Jira_helper/node_modules/strip-ansi/index.js generated vendored Normal file
View File

@@ -0,0 +1,14 @@
import ansiRegex from 'ansi-regex';
// Compile the ANSI-escape-matching regex once at module load; `stripAnsi`
// reuses it on every call.
const regex = ansiRegex();
/**
Strip ANSI escape codes from a string.

@param {string} string - The string to strip.
@returns {string} The input with all ANSI escape codes removed.
@throws {TypeError} If the argument is not a string.
*/
export default function stripAnsi(string) {
	if (typeof string === 'string') {
		// Even though the regex is global, there is no need to reset its
		// `.lastIndex`: unlike `.exec()` and `.test()`, `.replace()` does
		// that automatically, and resetting it manually costs performance.
		return string.replace(regex, '');
	}

	throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``);
}

9
Jira_helper/node_modules/strip-ansi/license generated vendored Normal file
View File

@@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

59
Jira_helper/node_modules/strip-ansi/package.json generated vendored Normal file
View File

@@ -0,0 +1,59 @@
{
"name": "strip-ansi",
"version": "7.1.2",
"description": "Strip ANSI escape codes from a string",
"license": "MIT",
"repository": "chalk/strip-ansi",
"funding": "https://github.com/chalk/strip-ansi?sponsor=1",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": "./index.js",
"types": "./index.d.ts",
"sideEffects": false,
"engines": {
"node": ">=12"
},
"scripts": {
"test": "xo && ava && tsd"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"strip",
"trim",
"remove",
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"string",
"tty",
"escape",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"log",
"logging",
"command-line",
"text"
],
"dependencies": {
"ansi-regex": "^6.0.1"
},
"devDependencies": {
"ava": "^3.15.0",
"tsd": "^0.17.0",
"xo": "^0.44.0"
}
}

37
Jira_helper/node_modules/strip-ansi/readme.md generated vendored Normal file
View File

@@ -0,0 +1,37 @@
# strip-ansi
> Strip [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) from a string
> [!NOTE]
> Node.js has this built-in now with [`stripVTControlCharacters`](https://nodejs.org/api/util.html#utilstripvtcontrolcharactersstr). The benefit of this package is consistent behavior across Node.js versions and faster improvements. The Node.js version is actually based on this package.
## Install
```sh
npm install strip-ansi
```
## Usage
```js
import stripAnsi from 'strip-ansi';
stripAnsi('\u001B[4mUnicorn\u001B[0m');
//=> 'Unicorn'
stripAnsi('\u001B]8;;https://github.com\u0007Click\u001B]8;;\u0007');
//=> 'Click'
```
## Related
- [strip-ansi-cli](https://github.com/chalk/strip-ansi-cli) - CLI for this module
- [strip-ansi-stream](https://github.com/chalk/strip-ansi-stream) - Streaming version of this module
- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes
- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes
- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right
## Maintainers
- [Sindre Sorhus](https://github.com/sindresorhus)
- [Josh Junon](https://github.com/qix-)

41
Jira_helper/node_modules/wrap-ansi/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,41 @@
export type Options = {
	/**
	By default the wrap is soft, meaning long words may extend past the column width. Setting this to `true` will make it hard wrap at the column width.

	@default false
	*/
	readonly hard?: boolean;

	/**
	By default, an attempt is made to split words at spaces, ensuring that they don't extend past the configured columns. If wordWrap is `false`, each column will instead be completely filled splitting words as necessary.

	@default true
	*/
	readonly wordWrap?: boolean;

	/**
	Whitespace on all lines is removed by default. Set this option to `false` if you don't want to trim.

	@default true
	*/
	readonly trim?: boolean;
};

/**
Wrap words to the specified column width.

@param string - A string with ANSI escape codes, like one styled by [`chalk`](https://github.com/chalk/chalk). Newline characters will be normalized to `\n`.
@param columns - The number of columns to wrap the text to.
@returns The wrapped string.

@example
```
import chalk from 'chalk';
import wrapAnsi from 'wrap-ansi';

const input = 'The quick brown ' + chalk.red('fox jumped over ') +
	'the lazy ' + chalk.green('dog and then ran away with the unicorn.');

console.log(wrapAnsi(input, 20));
```
*/
export default function wrapAnsi(string: string, columns: number, options?: Options): string;

222
Jira_helper/node_modules/wrap-ansi/index.js generated vendored Executable file
View File

@@ -0,0 +1,222 @@
import stringWidth from 'string-width';
import stripAnsi from 'strip-ansi';
import ansiStyles from 'ansi-styles';
// Characters that can start an ANSI escape sequence: ESC (0x1B) and the
// single-character CSI (0x9B).
const ESCAPES = new Set([
	'\u001B',
	'\u009B',
]);

// SGR parameter 39: reset the foreground color to the default.
const END_CODE = 39;
// BEL, which terminates an OSC hyperlink sequence.
const ANSI_ESCAPE_BELL = '\u0007';
// Control Sequence Introducer character (follows the escape character).
const ANSI_CSI = '[';
// Operating System Command introducer (follows the escape character).
const ANSI_OSC = ']';
// Terminates an SGR (style/color) sequence.
const ANSI_SGR_TERMINATOR = 'm';
// Prefix of an OSC 8 hyperlink sequence: `]8;;`.
const ANSI_ESCAPE_LINK = `${ANSI_OSC}8;;`;

// Build an SGR escape sequence for `code` (used to re-assert styling
// around inserted line breaks).
const wrapAnsiCode = code => `${ESCAPES.values().next().value}${ANSI_CSI}${code}${ANSI_SGR_TERMINATOR}`;
// Build an OSC 8 hyperlink escape for `url`; an empty `url` closes the link.
const wrapAnsiHyperlink = url => `${ESCAPES.values().next().value}${ANSI_ESCAPE_LINK}${url}${ANSI_ESCAPE_BELL}`;
// Visual width of each space-separated word. Extra characters added by
// ANSI escape codes contribute nothing, because `stringWidth` strips them.
const wordLengths = string => {
	const words = string.split(' ');
	return words.map(word => stringWidth(word));
};
// Wrap a single word that is too long for one row across multiple rows,
// mutating `rows` in place. ANSI escape sequences do not count toward the
// visible length; OSC 8 hyperlink escapes end at BEL, SGR escapes at `m`.
const wrapWord = (rows, word, columns) => {
	const characters = [...word];
	let isInsideEscape = false;
	let isInsideLinkEscape = false;
	// Columns already occupied on the row currently being filled.
	let visible = stringWidth(stripAnsi(rows.at(-1)));
	for (const [index, character] of characters.entries()) {
		const characterLength = stringWidth(character);
		if (visible + characterLength <= columns) {
			rows[rows.length - 1] += character;
		} else {
			rows.push(character);
			visible = 0;
		}
		if (ESCAPES.has(character)) {
			isInsideEscape = true;
			// Peek ahead to see whether this escape opens an OSC 8 hyperlink.
			const ansiEscapeLinkCandidate = characters.slice(index + 1, index + 1 + ANSI_ESCAPE_LINK.length).join('');
			isInsideLinkEscape = ansiEscapeLinkCandidate === ANSI_ESCAPE_LINK;
		}
		if (isInsideEscape) {
			if (isInsideLinkEscape) {
				if (character === ANSI_ESCAPE_BELL) {
					isInsideEscape = false;
					isInsideLinkEscape = false;
				}
			} else if (character === ANSI_SGR_TERMINATOR) {
				isInsideEscape = false;
			}
			// Characters inside an escape sequence are invisible — skip the
			// width accounting below.
			continue;
		}
		visible += characterLength;
		if (visible === columns && index < characters.length - 1) {
			rows.push('');
			visible = 0;
		}
	}
	// It's possible that the last row we copy over is only
	// ansi escape characters, handle this edge-case
	if (!visible && rows.at(-1).length > 0 && rows.length > 1) {
		rows[rows.length - 2] += rows.pop();
	}
};
// Trim trailing spaces from a string while keeping any invisible
// sequences (such as ANSI escapes) that follow the last visible word.
const stringVisibleTrimSpacesRight = string => {
	const words = string.split(' ');

	// Index just past the last word that has a visible width.
	let end = words.length;
	while (end > 0 && stringWidth(words[end - 1]) === 0) {
		end--;
	}

	// Nothing invisible at the end — return the input untouched.
	if (end === words.length) {
		return string;
	}

	// Rejoin the visible part normally and append the invisible tail with
	// its separating spaces removed.
	return words.slice(0, end).join(' ') + words.slice(end).join('');
};
// The wrap-ansi module can be invoked in either 'hard' or 'soft' wrap mode.
//
// 'hard' will never allow a string to take up more than columns characters.
//
// 'soft' allows long words to expand past the column length.
//
// `exec` wraps a single (newline-free) line in two phases: first it
// distributes words across `rows`, then it re-emits the joined rows while
// re-asserting ANSI escape state around each inserted line break.
const exec = (string, columns, options = {}) => {
	// A whitespace-only line collapses to nothing unless trimming is off.
	if (options.trim !== false && string.trim() === '') {
		return '';
	}
	let returnValue = '';
	let escapeCode; // most recent SGR code seen (undefined once reset via code 39)
	let escapeUrl; // URI of the currently open OSC 8 hyperlink, if any
	const lengths = wordLengths(string);
	let rows = [''];
	// Phase 1: distribute words across rows, wrapping as required.
	for (const [index, word] of string.split(' ').entries()) {
		if (options.trim !== false) {
			rows[rows.length - 1] = rows.at(-1).trimStart();
		}
		let rowLength = stringWidth(rows.at(-1));
		if (index !== 0) {
			if (rowLength >= columns && (options.wordWrap === false || options.trim === false)) {
				// If we start with a new word but the current row length equals the length of the columns, add a new row
				rows.push('');
				rowLength = 0;
			}
			if (rowLength > 0 || options.trim === false) {
				// Re-insert the separating space consumed by `split(' ')`.
				rows[rows.length - 1] += ' ';
				rowLength++;
			}
		}
		// In 'hard' wrap mode, the length of a line is never allowed to extend past 'columns'
		if (options.hard && lengths[index] > columns) {
			const remainingColumns = (columns - rowLength);
			const breaksStartingThisLine = 1 + Math.floor((lengths[index] - remainingColumns - 1) / columns);
			const breaksStartingNextLine = Math.floor((lengths[index] - 1) / columns);
			// Starting the word on a fresh row can save one break overall.
			if (breaksStartingNextLine < breaksStartingThisLine) {
				rows.push('');
			}
			wrapWord(rows, word, columns);
			continue;
		}
		if (rowLength + lengths[index] > columns && rowLength > 0 && lengths[index] > 0) {
			if (options.wordWrap === false && rowLength < columns) {
				wrapWord(rows, word, columns);
				continue;
			}
			rows.push('');
		}
		if (rowLength + lengths[index] > columns && options.wordWrap === false) {
			wrapWord(rows, word, columns);
			continue;
		}
		rows[rows.length - 1] += word;
	}
	if (options.trim !== false) {
		rows = rows.map(row => stringVisibleTrimSpacesRight(row));
	}
	// Phase 2: re-emit the joined rows, tracking escape state so styling
	// and hyperlinks survive the inserted newlines.
	const preString = rows.join('\n');
	const pre = [...preString];
	// We need to keep a separate index as `String#slice()` works on Unicode code units, while `pre` is an array of codepoints.
	let preStringIndex = 0;
	for (const [index, character] of pre.entries()) {
		returnValue += character;
		if (ESCAPES.has(character)) {
			// Record the SGR code or hyperlink URI this escape introduces.
			const {groups} = new RegExp(`(?:\\${ANSI_CSI}(?<code>\\d+)m|\\${ANSI_ESCAPE_LINK}(?<uri>.*)${ANSI_ESCAPE_BELL})`).exec(preString.slice(preStringIndex)) || {groups: {}};
			if (groups.code !== undefined) {
				const code = Number.parseFloat(groups.code);
				escapeCode = code === END_CODE ? undefined : code;
			} else if (groups.uri !== undefined) {
				escapeUrl = groups.uri.length === 0 ? undefined : groups.uri;
			}
		}
		const code = ansiStyles.codes.get(Number(escapeCode));
		if (pre[index + 1] === '\n') {
			// Just before an inserted newline: close the open hyperlink and
			// emit the code that `ansiStyles.codes` maps the active SGR code to.
			if (escapeUrl) {
				returnValue += wrapAnsiHyperlink('');
			}
			if (escapeCode && code) {
				returnValue += wrapAnsiCode(code);
			}
		} else if (character === '\n') {
			// Just after the newline: re-emit the active SGR code and
			// re-open the hyperlink.
			if (escapeCode && code) {
				returnValue += wrapAnsiCode(escapeCode);
			}
			if (escapeUrl) {
				returnValue += wrapAnsiHyperlink(escapeUrl);
			}
		}
		preStringIndex += character.length;
	}
	return returnValue;
};
/**
Wrap words to the specified column width.

Each line of the input (after normalizing `\r\n` to `\n`) is wrapped
independently by `exec`, and the wrapped lines are rejoined with `\n`.

@param {string} string - String with ANSI escape codes; coerced with `String()`.
@param {number} columns - The number of columns to wrap the text to.
@param {object} [options] - `hard`, `wordWrap`, and `trim` flags.
@returns {string} The wrapped string.
*/
export default function wrapAnsi(string, columns, options) {
	const normalized = String(string).normalize().replaceAll('\r\n', '\n');
	const lines = normalized.split('\n');
	const wrappedLines = lines.map(line => exec(line, columns, options));
	return wrappedLines.join('\n');
}

9
Jira_helper/node_modules/wrap-ansi/license generated vendored Normal file
View File

@@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

69
Jira_helper/node_modules/wrap-ansi/package.json generated vendored Normal file
View File

@@ -0,0 +1,69 @@
{
"name": "wrap-ansi",
"version": "9.0.2",
"description": "Wordwrap a string with ANSI escape codes",
"license": "MIT",
"repository": "chalk/wrap-ansi",
"funding": "https://github.com/chalk/wrap-ansi?sponsor=1",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"type": "module",
"exports": {
"types": "./index.d.ts",
"default": "./index.js"
},
"engines": {
"node": ">=18"
},
"scripts": {
"test": "xo && nyc ava && tsd"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"wrap",
"break",
"wordwrap",
"wordbreak",
"linewrap",
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"string",
"tty",
"escape",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"log",
"logging",
"command-line",
"text"
],
"dependencies": {
"ansi-styles": "^6.2.1",
"string-width": "^7.0.0",
"strip-ansi": "^7.1.0"
},
"devDependencies": {
"ava": "^5.3.1",
"chalk": "^5.3.0",
"coveralls": "^3.1.1",
"has-ansi": "^5.0.1",
"nyc": "^15.1.0",
"tsd": "^0.29.0",
"xo": "^0.56.0"
}
}

75
Jira_helper/node_modules/wrap-ansi/readme.md generated vendored Normal file
View File

@@ -0,0 +1,75 @@
# wrap-ansi
> Wordwrap a string with [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles)
## Install
```sh
npm install wrap-ansi
```
## Usage
```js
import chalk from 'chalk';
import wrapAnsi from 'wrap-ansi';
const input = 'The quick brown ' + chalk.red('fox jumped over ') +
'the lazy ' + chalk.green('dog and then ran away with the unicorn.');
console.log(wrapAnsi(input, 20));
```
<img width="331" src="screenshot.png">
## API
### wrapAnsi(string, columns, options?)
Wrap words to the specified column width.
#### string
Type: `string`
A string with ANSI escape codes, like one styled by [`chalk`](https://github.com/chalk/chalk).
Newline characters will be normalized to `\n`.
#### columns
Type: `number`
The number of columns to wrap the text to.
#### options
Type: `object`
##### hard
Type: `boolean`\
Default: `false`
By default the wrap is soft, meaning long words may extend past the column width. Setting this to `true` will make it hard wrap at the column width.
##### wordWrap
Type: `boolean`\
Default: `true`
By default, an attempt is made to split words at spaces, ensuring that they don't extend past the configured columns. If wordWrap is `false`, each column will instead be completely filled splitting words as necessary.
##### trim
Type: `boolean`\
Default: `true`
Whitespace on all lines is removed by default. Set this option to `false` if you don't want to trim.
## Related
- [slice-ansi](https://github.com/chalk/slice-ansi) - Slice a string with ANSI escape codes
- [cli-truncate](https://github.com/sindresorhus/cli-truncate) - Truncate a string to a specific width in the terminal
- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right
- [jsesc](https://github.com/mathiasbynens/jsesc) - Generate ASCII-only output from Unicode strings. Useful for creating test fixtures.

100
Jira_helper/node_modules/y18n/CHANGELOG.md generated vendored Normal file
View File

@@ -0,0 +1,100 @@
# Change Log
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
### [5.0.8](https://www.github.com/yargs/y18n/compare/v5.0.7...v5.0.8) (2021-04-07)
### Bug Fixes
* **deno:** force modern release for Deno ([b1c215a](https://www.github.com/yargs/y18n/commit/b1c215aed714bee5830e76de3e335504dc2c4dab))
### [5.0.7](https://www.github.com/yargs/y18n/compare/v5.0.6...v5.0.7) (2021-04-07)
### Bug Fixes
* **deno:** force release for deno ([#121](https://www.github.com/yargs/y18n/issues/121)) ([d3f2560](https://www.github.com/yargs/y18n/commit/d3f2560e6cedf2bfa2352e9eec044da53f9a06b2))
### [5.0.6](https://www.github.com/yargs/y18n/compare/v5.0.5...v5.0.6) (2021-04-05)
### Bug Fixes
* **webpack:** skip readFileSync if not defined ([#117](https://www.github.com/yargs/y18n/issues/117)) ([6966fa9](https://www.github.com/yargs/y18n/commit/6966fa91d2881cc6a6c531e836099e01f4da1616))
### [5.0.5](https://www.github.com/yargs/y18n/compare/v5.0.4...v5.0.5) (2020-10-25)
### Bug Fixes
* address prototype pollution issue ([#108](https://www.github.com/yargs/y18n/issues/108)) ([a9ac604](https://www.github.com/yargs/y18n/commit/a9ac604abf756dec9687be3843e2c93bfe581f25))
### [5.0.4](https://www.github.com/yargs/y18n/compare/v5.0.3...v5.0.4) (2020-10-16)
### Bug Fixes
* **exports:** node 13.0 and 13.1 require the dotted object form _with_ a string fallback ([#105](https://www.github.com/yargs/y18n/issues/105)) ([4f85d80](https://www.github.com/yargs/y18n/commit/4f85d80dbaae6d2c7899ae394f7ad97805df4886))
### [5.0.3](https://www.github.com/yargs/y18n/compare/v5.0.2...v5.0.3) (2020-10-16)
### Bug Fixes
* **exports:** node 13.0-13.6 require a string fallback ([#103](https://www.github.com/yargs/y18n/issues/103)) ([e39921e](https://www.github.com/yargs/y18n/commit/e39921e1017f88f5d8ea97ddea854ffe92d68e74))
### [5.0.2](https://www.github.com/yargs/y18n/compare/v5.0.1...v5.0.2) (2020-10-01)
### Bug Fixes
* **deno:** update types for deno ^1.4.0 ([#100](https://www.github.com/yargs/y18n/issues/100)) ([3834d9a](https://www.github.com/yargs/y18n/commit/3834d9ab1332f2937c935ada5e76623290efae81))
### [5.0.1](https://www.github.com/yargs/y18n/compare/v5.0.0...v5.0.1) (2020-09-05)
### Bug Fixes
* main had old index path ([#98](https://www.github.com/yargs/y18n/issues/98)) ([124f7b0](https://www.github.com/yargs/y18n/commit/124f7b047ba9596bdbdf64459988304e77f3de1b))
## [5.0.0](https://www.github.com/yargs/y18n/compare/v4.0.0...v5.0.0) (2020-09-05)
### ⚠ BREAKING CHANGES
* exports maps are now used, which modifies import behavior.
* drops Node 6 and 4. begin following Node.js LTS schedule (#89)
### Features
* add support for ESM and Deno ([#95](https://www.github.com/yargs/y18n/issues/95)) ([4d7ae94](https://www.github.com/yargs/y18n/commit/4d7ae94bcb42e84164e2180366474b1cd321ed94))
### Build System
* drops Node 6 and 4. begin following Node.js LTS schedule ([#89](https://www.github.com/yargs/y18n/issues/89)) ([3cc0c28](https://www.github.com/yargs/y18n/commit/3cc0c287240727b84eaf1927f903612ec80f5e43))
### 4.0.1 (2020-10-25)
### Bug Fixes
* address prototype pollution issue ([#108](https://www.github.com/yargs/y18n/issues/108)) ([a9ac604](https://www.github.com/yargs/y18n/commit/7de58ca0d315990cdb38234e97fc66254cdbcd71))
## [4.0.0](https://github.com/yargs/y18n/compare/v3.2.1...v4.0.0) (2017-10-10)
### Bug Fixes
* allow support for falsy values like 0 in tagged literal ([#45](https://github.com/yargs/y18n/issues/45)) ([c926123](https://github.com/yargs/y18n/commit/c926123))
### Features
* **__:** added tagged template literal support ([#44](https://github.com/yargs/y18n/issues/44)) ([0598daf](https://github.com/yargs/y18n/commit/0598daf))
### BREAKING CHANGES
* **__:** dropping Node 0.10/Node 0.12 support

13
Jira_helper/node_modules/y18n/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,13 @@
Copyright (c) 2015, Contributors
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.

127
Jira_helper/node_modules/y18n/README.md generated vendored Normal file
View File

@@ -0,0 +1,127 @@
# y18n
[![NPM version][npm-image]][npm-url]
[![js-standard-style][standard-image]][standard-url]
[![Conventional Commits](https://img.shields.io/badge/Conventional%20Commits-1.0.0-yellow.svg)](https://conventionalcommits.org)
The bare-bones internationalization library used by yargs.
Inspired by [i18n](https://www.npmjs.com/package/i18n).
## Examples
_simple string translation:_
```js
const __ = require('y18n')().__;
console.log(__('my awesome string %s', 'foo'));
```
output:
`my awesome string foo`
_using tagged template literals_
```js
const __ = require('y18n')().__;
const str = 'foo';
console.log(__`my awesome string ${str}`);
```
output:
`my awesome string foo`
_pluralization support:_
```js
const __n = require('y18n')().__n;
console.log(__n('one fish %s', '%d fishes %s', 2, 'foo'));
```
output:
`2 fishes foo`
## Deno Example
As of `v5` `y18n` supports [Deno](https://github.com/denoland/deno):
```typescript
import y18n from "https://deno.land/x/y18n/deno.ts";
const __ = y18n({
locale: 'pirate',
directory: './test/locales'
}).__
console.info(__`Hi, ${'Ben'} ${'Coe'}!`)
```
You will need to run with `--allow-read` to load alternative locales.
## JSON Language Files
The JSON language files should be stored in a `./locales` folder.
File names correspond to locales, e.g., `en.json`, `pirate.json`.
When strings are observed for the first time they will be
added to the JSON file corresponding to the current locale.
## Methods
### require('y18n')(config)
Create an instance of y18n with the config provided, options include:
* `directory`: the locale directory, default `./locales`.
* `updateFiles`: should newly observed strings be updated in file, default `true`.
* `locale`: what locale should be used.
* `fallbackToLanguage`: should fallback to a language-only file (e.g. `en.json`)
be allowed if a file matching the locale does not exist (e.g. `en_US.json`),
default `true`.
### y18n.\_\_(str, arg, arg, arg)
Print a localized string, `%s` will be replaced with `arg`s.
This function can also be used as a tag for a template literal. You can use it
like this: <code>__&#96;hello ${'world'}&#96;</code>. This will be equivalent to
`__('hello %s', 'world')`.
### y18n.\_\_n(singularString, pluralString, count, arg, arg, arg)
Print a localized string with appropriate pluralization. If `%d` is provided
in the string, the `count` will replace this placeholder.
### y18n.setLocale(str)
Set the current locale being used.
### y18n.getLocale()
What locale is currently being used?
### y18n.updateLocale(obj)
Update the current locale with the key value pairs in `obj`.
## Supported Node.js Versions
Libraries in this ecosystem make a best effort to track
[Node.js' release schedule](https://nodejs.org/en/about/releases/). Here's [a
post on why we think this is important](https://medium.com/the-node-js-collection/maintainers-should-consider-following-node-js-release-schedule-ab08ed4de71a).
## License
ISC
[npm-url]: https://npmjs.org/package/y18n
[npm-image]: https://img.shields.io/npm/v/y18n.svg
[standard-image]: https://img.shields.io/badge/code%20style-standard-brightgreen.svg
[standard-url]: https://github.com/feross/standard

203
Jira_helper/node_modules/y18n/build/index.cjs generated vendored Normal file
View File

@@ -0,0 +1,203 @@
'use strict';
var fs = require('fs');
var util = require('util');
var path = require('path');
// Platform abstraction (fs, format, resolve, exists) injected through the
// factory function below before any Y18N instance is created.
let shim;
// Bare-bones internationalization store used by yargs. Translations are
// cached per locale in `this.cache`; when `updateFiles` is enabled, newly
// observed strings are queued and written back to the locale's JSON file
// through the injected platform `shim`.
class Y18N {
	// opts: {directory?, updateFiles?, locale?, fallbackToLanguage?}
	constructor(opts) {
		// configurable options.
		opts = opts || {};
		this.directory = opts.directory || './locales';
		this.updateFiles = typeof opts.updateFiles === 'boolean' ? opts.updateFiles : true;
		this.locale = opts.locale || 'en';
		this.fallbackToLanguage = typeof opts.fallbackToLanguage === 'boolean' ? opts.fallbackToLanguage : true;
		// internal stuff.
		// Null-prototype object so locale keys cannot collide with
		// Object.prototype members (prototype-pollution guard).
		this.cache = Object.create(null);
		this.writeQueue = [];
	}
	// Translate `str`, expanding `%s`-style placeholders with the remaining
	// arguments via `shim.format`. Can also be used as a template-literal
	// tag. A trailing function argument, if present, is invoked after any
	// pending locale-file write completes (or immediately if none is needed).
	__(...args) {
		if (typeof arguments[0] !== 'string') {
			// Tagged-template call: the first argument is the parts array.
			return this._taggedLiteral(arguments[0], ...arguments);
		}
		const str = args.shift();
		let cb = function () { }; // start with noop.
		if (typeof args[args.length - 1] === 'function')
			cb = args.pop();
		cb = cb || function () { }; // noop.
		if (!this.cache[this.locale])
			this._readLocaleFile();
		// we've observed a new string, update the language file.
		if (!this.cache[this.locale][str] && this.updateFiles) {
			this.cache[this.locale][str] = str;
			// include the current directory and locale,
			// since these values could change before the
			// write is performed.
			this._enqueueWrite({
				directory: this.directory,
				locale: this.locale,
				cb
			});
		}
		else {
			cb();
		}
		return shim.format.apply(shim.format, [this.cache[this.locale][str] || str].concat(args));
	}
	// Translate with pluralization: picks `singular` or `plural` based on
	// `quantity`, substitutes a `%d` placeholder (if present) with the
	// quantity, and expands remaining `%s` placeholders with the rest of
	// the arguments.
	__n() {
		const args = Array.prototype.slice.call(arguments);
		const singular = args.shift();
		const plural = args.shift();
		const quantity = args.shift();
		let cb = function () { }; // start with noop.
		if (typeof args[args.length - 1] === 'function')
			cb = args.pop();
		if (!this.cache[this.locale])
			this._readLocaleFile();
		let str = quantity === 1 ? singular : plural;
		// Cached plural entries are stored as {one, other} objects keyed by
		// the singular form.
		if (this.cache[this.locale][singular]) {
			const entry = this.cache[this.locale][singular];
			str = entry[quantity === 1 ? 'one' : 'other'];
		}
		// we've observed a new string, update the language file.
		if (!this.cache[this.locale][singular] && this.updateFiles) {
			this.cache[this.locale][singular] = {
				one: singular,
				other: plural
			};
			// include the current directory and locale,
			// since these values could change before the
			// write is performed.
			this._enqueueWrite({
				directory: this.directory,
				locale: this.locale,
				cb
			});
		}
		else {
			cb();
		}
		// if a %d placeholder is provided, add quantity
		// to the arguments expanded by util.format.
		const values = [str];
		if (~str.indexOf('%d'))
			values.push(quantity);
		return shim.format.apply(shim.format, values.concat(args));
	}
	// Switch the active locale for subsequent lookups.
	setLocale(locale) {
		this.locale = locale;
	}
	// Return the locale currently in use.
	getLocale() {
		return this.locale;
	}
	// Merge the own key/value pairs of `obj` into the current locale's cache.
	updateLocale(obj) {
		if (!this.cache[this.locale])
			this._readLocaleFile();
		for (const key in obj) {
			if (Object.prototype.hasOwnProperty.call(obj, key)) {
				this.cache[this.locale][key] = obj[key];
			}
		}
	}
	// Convert a tagged-template invocation into a printf-style call: each
	// interpolated value becomes a `%s` placeholder, then delegates to `__`.
	_taggedLiteral(parts, ...args) {
		let str = '';
		parts.forEach(function (part, i) {
			const arg = args[i + 1];
			str += part;
			if (typeof arg !== 'undefined') {
				str += '%s';
			}
		});
		return this.__.apply(this, [str].concat([].slice.call(args, 1)));
	}
	// Queue a locale-file write; start processing if the queue was empty
	// (otherwise the in-flight write's callback will pick it up).
	_enqueueWrite(work) {
		this.writeQueue.push(work);
		if (this.writeQueue.length === 1)
			this._processWriteQueue();
	}
	// Serialize the cached locale and write it to disk asynchronously, then
	// continue with the next queued write. Writes run one at a time, in order.
	_processWriteQueue() {
		const _this = this;
		const work = this.writeQueue[0];
		// destructure the enqueued work.
		const directory = work.directory;
		const locale = work.locale;
		const cb = work.cb;
		const languageFile = this._resolveLocaleFile(directory, locale);
		const serializedLocale = JSON.stringify(this.cache[locale], null, 2);
		shim.fs.writeFile(languageFile, serializedLocale, 'utf-8', function (err) {
			_this.writeQueue.shift();
			if (_this.writeQueue.length > 0)
				_this._processWriteQueue();
			cb(err);
		});
	}
	// Load the current locale's JSON file into the cache. A missing file
	// yields an empty table; a malformed file rethrows with a message that
	// names the offending file.
	_readLocaleFile() {
		let localeLookup = {};
		const languageFile = this._resolveLocaleFile(this.directory, this.locale);
		try {
			// When using a bundler such as webpack, readFileSync may not be defined:
			if (shim.fs.readFileSync) {
				localeLookup = JSON.parse(shim.fs.readFileSync(languageFile, 'utf-8'));
			}
		}
		catch (err) {
			if (err instanceof SyntaxError) {
				err.message = 'syntax error in ' + languageFile;
			}
			if (err.code === 'ENOENT')
				localeLookup = {};
			else
				throw err;
		}
		this.cache[this.locale] = localeLookup;
	}
	// Resolve the path of the locale's JSON file, optionally falling back
	// from a region-specific locale (e.g. `en_US.json`) to the bare
	// language file (`en.json`) when the specific file does not exist.
	_resolveLocaleFile(directory, locale) {
		let file = shim.resolve(directory, './', locale + '.json');
		if (this.fallbackToLanguage && !this._fileExistsSync(file) && ~locale.lastIndexOf('_')) {
			// attempt fallback to language only
			const languageFile = shim.resolve(directory, './', locale.split('_')[0] + '.json');
			if (this._fileExistsSync(languageFile))
				file = languageFile;
		}
		return file;
	}
	// Synchronous existence check, delegated to the platform shim.
	_fileExistsSync(file) {
		return shim.exists(file);
	}
}
// Build the public y18n API: install the platform shim, instantiate Y18N,
// and expose its methods bound to that instance.
function y18n$1(opts, _shim) {
	shim = _shim;
	const instance = new Y18N(opts);
	const bound = method => instance[method].bind(instance);
	return {
		__: bound('__'),
		__n: bound('__n'),
		setLocale: bound('setLocale'),
		getLocale: bound('getLocale'),
		updateLocale: bound('updateLocale'),
		locale: instance.locale
	};
}
// Node.js implementation of the platform shim consumed by Y18N:
// file access, printf-style formatting, path resolution, and a
// synchronous file-existence check.
var nodePlatformShim = {
	fs: {
		readFileSync: fs.readFileSync,
		writeFile: fs.writeFile
	},
	format: util.format,
	resolve: path.resolve,
	exists(file) {
		try {
			// Only a regular file counts as existing.
			return fs.statSync(file).isFile();
		}
		catch (err) {
			return false;
		}
	}
};
// Public entry point: create a y18n instance backed by the Node.js shim.
const y18n = opts => y18n$1(opts, nodePlatformShim);

module.exports = y18n;

Some files were not shown because too many files have changed in this diff Show More