3 files changed: 16 additions (+), 10 deletions (-)

File 1 of 3 (package metadata):
@@ -11,7 +11,7 @@
     "api",
     "human-readable"
   ],
-  "version": "0.0.7",
+  "version": "0.0.8",
   "homepage": "https://github.com/eserozvataf/temporal-parse#readme",
   "author": "Eser Ozvataf <[email protected]>",
   "contributors": [
File 2 of 3 (date parsing tests):
@@ -16,6 +16,12 @@ Deno.test(function tokenizeDateTest() {
   });
 });
 
+Deno.test(function falseValues() {
+  const parsedDate = parseDate("not a date", "generic-europe-asia");
+
+  asserts.assertEquals(parsedDate, undefined);
+});
+
 Deno.test(function dateComparision() {
   for (let i = 0; i < dateSamplesEA.length; i++) {
     const dateEA = dateSamplesEA[i];
@@ -24,11 +30,11 @@ Deno.test(function dateComparision() {
     const dateUS = dateSamplesUS[i];
     const dateUSParsed = parseDate(dateUS, "generic-american");
 
-    console.log(
-      `dateEA: ${dateEA}, dateUS: ${dateUS}`,
-      dateEAParsed,
-      dateUSParsed,
-    );
+    // console.log(
+    //   `dateEA: ${dateEA}, dateUS: ${dateUS}`,
+    //   dateEAParsed,
+    //   dateUSParsed,
+    // );
     asserts.assertEquals(
       toDate(dateEAParsed!),
       toDate(dateUSParsed!),
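Note on the test change: the new falseValues test pins down that parseDate returns undefined, rather than throwing, when the input cannot be parsed. Below is a minimal caller-side sketch of that contract; the import specifier "./mod.ts" is an assumption for illustration, while parseDate, toDate, and the "generic-europe-asia" format key come from the diff itself.

// Sketch only: "./mod.ts" is an assumed entry point, not taken from this diff.
import { parseDate, toDate } from "./mod.ts";

const parsed = parseDate("not a date", "generic-europe-asia");

if (parsed === undefined) {
  // Unparseable input is signalled by undefined, per the falseValues test above.
  console.log("could not parse the input");
} else {
  console.log(toDate(parsed));
}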
File 3 of 3 (format tokenizer source):
@@ -421,7 +421,7 @@ const tokenizeFormat = function tokenizeFormat(input: string): FormatToken[] {
     // console.log(currentToken, tokens.at(-1));
     if (
       tokenType === FormatTokenType.literal &&
-      tokens.at(-1)?.[0] === FormatTokenType.literal
+      tokens.slice(-1)?.[0]?.[0] === FormatTokenType.literal
     ) {
       tokens[tokens.length - 1][1] += currentToken[1];
 
@@ -436,7 +436,7 @@ const tokenizeFormat = function tokenizeFormat(input: string): FormatToken[] {
   for (let i = 0; i < input.length; i++) {
     const char = input[i];
 
-    if (inQuote === undefined && currentToken[1].at(-1) !== char) {
+    if (inQuote === undefined && currentToken[1].slice(-1) !== char) {
       if (currentToken[1].length > 0) {
         pushToken();
       }
@@ -485,9 +485,9 @@ const formatTokenizerToRegExp = function formatTokenizerToRegExp(
     }
 
     let nextChar = "$";
-    const peek = arr.at(idx + 1);
+    const peek = arr[idx + 1];
     if (peek !== undefined && peek[0] === FormatTokenType.literal) {
-      nextChar = escapeRegExp(peek[1].at(0)!);
+      nextChar = escapeRegExp(peek[1][0]!);
     }
 
     const symbol = symbols[curr[1]];
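Note on the source changes: all three hunks replace Array.prototype.at / String.prototype.at with slice(-1) or plain index access. Since .at() only landed in ES2022, this is presumably a compatibility fix for toolchains whose TypeScript lib/target does not yet include it. The replacements are behaviour-preserving for the usages above; the one visible difference is that "".slice(-1) yields "" where "".at(-1) would yield undefined, and both still fail the !== char comparison in the same way. A standalone sketch of the equivalences, with illustrative values not taken from the library:

// Standalone illustration of the .at() replacements; values are made up for the example.
const tokens: [number, string][] = [[0, "YYYY"], [1, "-"]];
const text = "abc";

// Last array element: tokens.at(-1) -> tokens.slice(-1)[0]
console.log(tokens.slice(-1)[0]);    // [1, "-"]

// Last character of a string: text.at(-1) -> text.slice(-1)
console.log(text.slice(-1));         // "c"

// Forward lookups: arr.at(idx + 1) -> arr[idx + 1], str.at(0) -> str[0]
console.log(tokens[0 + 1], text[0]); // [1, "-"] "a"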