Skip to content

Commit

Permalink
Refactor credentials types to increase end user's type safety
Browse files Browse the repository at this point in the history
  • Loading branch information
Or Zarchi committed Apr 1, 2023
1 parent 8830e4b commit 84a2048
Show file tree
Hide file tree
Showing 21 changed files with 106 additions and 72 deletions.
2 changes: 1 addition & 1 deletion src/scrapers/amex.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ describe('AMEX legacy scraper', () => {

const scraper = new AMEXScraper(options);

const result = await scraper.scrape({ username: 'e10s12', password: '3f3ss3d' });
const result = await scraper.scrape({ id: 'e10s12', card6Digits: '123456', password: '3f3ss3d' });

expect(result).toBeDefined();
expect(result.success).toBeFalsy();
Expand Down
9 changes: 5 additions & 4 deletions src/scrapers/base-beinleumi-group.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@ import { SHEKEL_CURRENCY } from '../constants';
import {
TransactionsAccount, Transaction, TransactionStatuses, TransactionTypes,
} from '../transactions';
import { ScraperCredentials } from './interface';

const DATE_FORMAT = 'DD/MM/YYYY';
const NO_TRANSACTION_IN_DATE_RANGE_TEXT = 'לא נמצאו נתונים בנושא המבוקש';
Expand Down Expand Up @@ -55,7 +54,7 @@ export function getPossibleLoginResults(): PossibleLoginResults {
return urls;
}

export function createLoginFields(credentials: ScraperCredentials) {
export function createLoginFields(credentials: ScraperSpecificCredentials) {
return [
{ selector: '#username', value: credentials.username },
{ selector: '#password', value: credentials.password },
Expand Down Expand Up @@ -290,14 +289,16 @@ async function fetchAccounts(page: Page, startDate: Moment) {
return accounts;
}

class BeinleumiGroupBaseScraper extends BaseScraperWithBrowser {
// Beinleumi-group login credentials: site username and password.
type ScraperSpecificCredentials = {username: string, password: string};

class BeinleumiGroupBaseScraper extends BaseScraperWithBrowser<ScraperSpecificCredentials> {
BASE_URL = '';

LOGIN_URL = '';

TRANSACTIONS_URL = '';

getLoginOptions(credentials: ScraperCredentials) {
getLoginOptions(credentials: ScraperSpecificCredentials) {
return {
loginUrl: `${this.LOGIN_URL}`,
fields: createLoginFields(credentials),
Expand Down
8 changes: 4 additions & 4 deletions src/scrapers/base-isracard-amex.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ import {
import { getDebug } from '../helpers/debug';
import { runSerial } from '../helpers/waiting';
import { ScraperErrorTypes } from './errors';
import { ScraperScrapingResult, ScraperCredentials, ScraperOptions } from './interface';
import { ScraperScrapingResult, ScraperOptions } from './interface';

const COUNTRY_CODE = '212';
const ID_TYPE = '1';
Expand Down Expand Up @@ -321,8 +321,8 @@ async function fetchAllTransactions(page: Page, options: ExtendedScraperOptions,
};
}


class IsracardAmexBaseScraper extends BaseScraperWithBrowser {
// Isracard/Amex login credentials: national ID, password, and 6 card digits.
type ScraperSpecificCredentials = {id: string, password: string, card6Digits: string};
class IsracardAmexBaseScraper extends BaseScraperWithBrowser<ScraperSpecificCredentials> {
private baseUrl: string;

private companyCode: string;
Expand All @@ -337,7 +337,7 @@ class IsracardAmexBaseScraper extends BaseScraperWithBrowser {
this.servicesUrl = `${baseUrl}/services/ProxyRequestHandler.ashx`;
}

async login(credentials: ScraperCredentials): Promise<ScraperScrapingResult> {
async login(credentials: ScraperSpecificCredentials): Promise<ScraperScrapingResult> {
await this.page.setRequestInterception(true);
this.page.on('request', (request) => {
if (request.url().includes('detector-dom.min.js')) {
Expand Down
4 changes: 2 additions & 2 deletions src/scrapers/base-scraper-with-browser.ts
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ async function getKeyByValue(object: PossibleLoginResults, value: string, page:
return Promise.resolve(LoginResults.UnknownError);
}

function handleLoginResult(scraper: BaseScraperWithBrowser, loginResult: LoginResults) {
function handleLoginResult(scraper: BaseScraperWithBrowser<ScraperCredentials>, loginResult: LoginResults) {
switch (loginResult) {
case LoginResults.Success:
scraper.emitProgress(ScraperProgressTypes.LoginSuccess);
Expand Down Expand Up @@ -108,7 +108,7 @@ function createGeneralError(): ScraperScrapingResult {
};
}

class BaseScraperWithBrowser extends BaseScraper {
class BaseScraperWithBrowser<TCredentials extends ScraperCredentials> extends BaseScraper<TCredentials> {
// NOTICE - it is discouraged to use bang (!) in general. It is used here because
// all the classes that inherit from this base assume it is mandatory.
protected browser!: Browser;
Expand Down
6 changes: 3 additions & 3 deletions src/scrapers/base-scraper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ export enum ScraperProgressTypes {
Terminating = 'TERMINATING',
}

export class BaseScraper implements Scraper {
export class BaseScraper<TCredentials extends ScraperCredentials> implements Scraper<TCredentials> {
private eventEmitter = new EventEmitter();

constructor(public options: ScraperOptions) {
Expand All @@ -38,7 +38,7 @@ export class BaseScraper implements Scraper {
moment.tz.setDefault('Asia/Jerusalem');
}

async scrape(credentials: ScraperCredentials): Promise<ScraperScrapingResult> {
async scrape(credentials: TCredentials): Promise<ScraperScrapingResult> {
this.emitProgress(ScraperProgressTypes.StartScraping);
await this.initialize();

Expand Down Expand Up @@ -87,7 +87,7 @@ export class BaseScraper implements Scraper {
}

// eslint-disable-next-line @typescript-eslint/no-unused-vars, @typescript-eslint/require-await
async login(_credentials: ScraperCredentials): Promise<ScraperLoginResult> {
async login(_credentials: TCredentials): Promise<ScraperLoginResult> {
throw new Error(`login() is not created in ${this.options.companyId}`);
}

Expand Down
10 changes: 6 additions & 4 deletions src/scrapers/beyahad-bishvilha.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ import {
SHEKEL_CURRENCY,
SHEKEL_CURRENCY_SYMBOL,
} from '../constants';
import { ScraperCredentials, ScraperOptions } from './interface';
import { ScraperOptions } from './interface';

const debug = getDebug('beyahadBishvilha');

Expand Down Expand Up @@ -137,22 +137,24 @@ function getPossibleLoginResults(): PossibleLoginResults {
return urls;
}

function createLoginFields(credentials: ScraperCredentials) {
// Pairs each Beyahad-Bishvilha login form selector with its credential value.
function createLoginFields(credentials: ScraperSpecificCredentials) {
  const { id, password } = credentials;
  return [
    { selector: '#loginId', value: id },
    { selector: '#loginPassword', value: password },
  ];
}

class BeyahadBishvilhaScraper extends BaseScraperWithBrowser {
// Beyahad Bishvilha login credentials: ID and password.
type ScraperSpecificCredentials = { id: string, password: string };

class BeyahadBishvilhaScraper extends BaseScraperWithBrowser<ScraperSpecificCredentials> {
protected getViewPort(): { width: number, height: number } {
return {
width: 1500,
height: 800,
};
}

getLoginOptions(credentials: ScraperCredentials) {
getLoginOptions(credentials: ScraperSpecificCredentials) {
return {
loginUrl: LOGIN_URL,
fields: createLoginFields(credentials),
Expand Down
2 changes: 1 addition & 1 deletion src/scrapers/discount.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ describe('Discount legacy scraper', () => {

const scraper = new DiscountScraper(options);

const result = await scraper.scrape({ username: 'e10s12', password: '3f3ss3d' });
const result = await scraper.scrape({ id: 'e10s12', password: '3f3ss3d', num: '1234' });

expect(result).toBeDefined();
expect(result.success).toBeFalsy();
Expand Down
10 changes: 6 additions & 4 deletions src/scrapers/discount.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import {
Transaction, TransactionStatuses, TransactionTypes,
} from '../transactions';
import { ScraperErrorTypes } from './errors';
import { ScraperScrapingResult, ScraperCredentials, ScraperOptions } from './interface';
import { ScraperScrapingResult, ScraperOptions } from './interface';

const BASE_URL = 'https://start.telebank.co.il';
const DATE_FORMAT = 'YYYYMMDD';
Expand Down Expand Up @@ -126,16 +126,18 @@ function getPossibleLoginResults(): PossibleLoginResults {
return urls;
}

function createLoginFields(credentials: ScraperCredentials) {
function createLoginFields(credentials: ScraperSpecificCredentials) {
return [
{ selector: '#tzId', value: credentials.id },
{ selector: '#tzPassword', value: credentials.password },
{ selector: '#aidnum', value: credentials.num },
];
}

class DiscountScraper extends BaseScraperWithBrowser {
getLoginOptions(credentials: ScraperCredentials) {
// Discount bank login credentials: ID, password, and the extra "num" field (filled into #aidnum).
type ScraperSpecificCredentials = { id: string, password: string, num: string };

class DiscountScraper extends BaseScraperWithBrowser<ScraperSpecificCredentials> {
getLoginOptions(credentials: ScraperSpecificCredentials) {
return {
loginUrl: `${BASE_URL}/login/#/LOGIN_PAGE`,
checkReadiness: async () => waitUntilElementFound(this.page, '#tzId'),
Expand Down
4 changes: 2 additions & 2 deletions src/scrapers/factory.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,12 @@ import UnionBankScraper from './union-bank';
import BeinleumiScraper from './beinleumi';
import MassadScraper from './massad';
import YahavScraper from './yahav';
import { Scraper, ScraperOptions } from './interface';
import { Scraper, ScraperCredentials, ScraperOptions } from './interface';
import { CompanyTypes } from '../definitions';
import BeyahadBishvilhaScraper from './beyahad-bishvilha';
import OneZeroScraper from './one-zero';

export default function createScraper(options: ScraperOptions): Scraper {
export default function createScraper(options: ScraperOptions): Scraper<ScraperCredentials> {
switch (options.companyId) {
case CompanyTypes.hapoalim:
return new HapoalimScraper(options);
Expand Down
2 changes: 1 addition & 1 deletion src/scrapers/hapoalim.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ describe('Hapoalim legacy scraper', () => {

const scraper = new HapoalimScraper(options);

const result = await scraper.scrape({ username: 'e10s12', password: '3f3ss3d' });
const result = await scraper.scrape({ userCode: 'e10s12', password: '3f3ss3d' });

expect(result).toBeDefined();
expect(result.success).toBeFalsy();
Expand Down
13 changes: 9 additions & 4 deletions src/scrapers/hapoalim.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ import {
TransactionsAccount, Transaction, TransactionStatuses, TransactionTypes,
} from '../transactions';
import { getDebug } from '../helpers/debug';
import { ScraperCredentials, ScraperOptions } from './interface';
import { ScraperOptions } from './interface';

const debug = getDebug('hapoalim');

Expand Down Expand Up @@ -211,20 +211,25 @@ function getPossibleLoginResults(baseUrl: string) {
return urls;
}

function createLoginFields(credentials: ScraperCredentials) {
// Maps Hapoalim credentials onto the login form's input selectors.
function createLoginFields(credentials: ScraperSpecificCredentials) {
  // Runtime safety net for untyped (plain JS) callers: the form cannot be
  // filled without a user code, so fail loudly instead of typing "undefined".
  const hasUserCode = 'userCode' in credentials;
  if (!hasUserCode) {
    throw new Error('Missing or invalid credentials');
  }
  return [
    { selector: '#userCode', value: credentials.userCode },
    { selector: '#password', value: credentials.password },
  ];
}

class HapoalimScraper extends BaseScraperWithBrowser {
// Bank Hapoalim login credentials: user code and password.
type ScraperSpecificCredentials = { userCode: string, password: string };

class HapoalimScraper extends BaseScraperWithBrowser<ScraperSpecificCredentials> {
// eslint-disable-next-line class-methods-use-this
get baseUrl() {
return 'https://login.bankhapoalim.co.il';
}

getLoginOptions(credentials: ScraperCredentials) {
getLoginOptions(credentials: ScraperSpecificCredentials) {
return {
loginUrl: `${this.baseUrl}/cgi-bin/poalwwwc?reqName=getLogonPage`,
fields: createLoginFields(credentials),
Expand Down
26 changes: 16 additions & 10 deletions src/scrapers/interface.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,20 @@ import { CompanyTypes } from '../definitions';
import { TransactionsAccount } from '../transactions';
import { ErrorResult, ScraperErrorTypes } from './errors';

export type ScraperCredentials = {
[key: string]: string;
} & ({
otpCodeRetriever?: () => Promise<string>;
phoneNumber?: string;
} | {
otpLongTermToken?: string;
});
// TODO: Deduplicate similar looking credentials types
// Union of every credentials shape accepted by the scrapers in this package.
// Each scraper's generic parameter narrows this to the exact member it needs.
export type ScraperCredentials =
// Bank Hapoalim
{userCode: string, password: string} |
// Beinleumi group, Leumi, Max, Mizrahi
{username: string, password: string} |
// Beyahad Bishvilha
{id: string, password: string} |
// Discount
{id: string, password: string, num: string} |
// Isracard / Amex
{id: string, password: string, card6Digits: string} |
// presumably Yahav — TODO confirm against that scraper
{username: string, nationalID: string, password: string} |
// presumably OneZero — email/password plus either a live OTP flow or a stored long-term token; confirm
({email: string, password: string} & ({
otpCodeRetriever: () => Promise<string>;
phoneNumber: string;
} | {
otpLongTermToken: string;
}));

export interface FutureDebit {
amount: number;
Expand Down Expand Up @@ -128,8 +134,8 @@ export interface ScraperScrapingResult {
errorMessage?: string; // only on success=false
}

export interface Scraper {
scrape(credentials: ScraperCredentials): Promise<ScraperScrapingResult>;
/**
 * Public contract of a scraper. TCredentials is the concrete credentials
 * shape the specific scraper accepts (one member of ScraperCredentials).
 */
export interface Scraper<TCredentials extends ScraperCredentials> {
/** Runs a full scraping session with the given credentials. */
scrape(credentials: TCredentials): Promise<ScraperScrapingResult>;
}

export type ScraperTwoFactorAuthTriggerResult = ErrorResult | {
Expand Down
2 changes: 1 addition & 1 deletion src/scrapers/isracard.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ describe('Isracard legacy scraper', () => {

const scraper = new IsracardScraper(options);

const result = await scraper.scrape({ username: 'e10s12', password: '3f3ss3d' });
const result = await scraper.scrape({ id: 'e10s12', password: '3f3ss3d', card6Digits: '123456' });

expect(result).toBeDefined();
expect(result.success).toBeFalsy();
Expand Down
10 changes: 6 additions & 4 deletions src/scrapers/leumi.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import { SHEKEL_CURRENCY } from '../constants';
import {
TransactionsAccount, Transaction, TransactionStatuses, TransactionTypes,
} from '../transactions';
import { ScraperScrapingResult, ScraperCredentials } from './interface';
import { ScraperScrapingResult } from './interface';
import { waitForNavigation } from '../helpers/navigation';

const BASE_URL = 'https://hb2.bankleumi.co.il';
Expand Down Expand Up @@ -56,7 +56,7 @@ function getPossibleLoginResults() {
return urls;
}

function createLoginFields(credentials: ScraperCredentials) {
function createLoginFields(credentials: ScraperSpecificCredentials) {
return [
{ selector: 'input[placeholder="שם משתמש"]', value: credentials.username },
{ selector: 'input[placeholder="סיסמה"]', value: credentials.password },
Expand Down Expand Up @@ -208,8 +208,10 @@ async function waitForPostLogin(page: Page): Promise<void> {
]);
}

class LeumiScraper extends BaseScraperWithBrowser {
getLoginOptions(credentials: ScraperCredentials) {
// Leumi login credentials: username and password.
type ScraperSpecificCredentials = { username: string, password: string};

class LeumiScraper extends BaseScraperWithBrowser<ScraperSpecificCredentials> {
getLoginOptions(credentials: ScraperSpecificCredentials) {
return {
loginUrl: LOGIN_URL,
fields: createLoginFields(credentials),
Expand Down
10 changes: 6 additions & 4 deletions src/scrapers/max.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import getAllMonthMoments from '../helpers/dates';
import { fixInstallments, sortTransactionsByDate, filterOldTransactions } from '../helpers/transactions';
import { Transaction, TransactionStatuses, TransactionTypes } from '../transactions';
import { getDebug } from '../helpers/debug';
import { ScraperCredentials, ScraperOptions } from './interface';
import { ScraperOptions } from './interface';

const debug = getDebug('max');

Expand Down Expand Up @@ -262,15 +262,17 @@ function getPossibleLoginResults(page: Page): PossibleLoginResults {
return urls;
}

function createLoginFields(credentials: ScraperCredentials) {
// Builds the selector/value pairs used to fill Max's login form.
function createLoginFields(credentials: ScraperSpecificCredentials) {
  const { username, password } = credentials;
  return [
    { selector: '#user-name', value: username },
    { selector: '#password', value: password },
  ];
}

class MaxScraper extends BaseScraperWithBrowser {
getLoginOptions(credentials: ScraperCredentials) {
// Max login credentials: username and password.
type ScraperSpecificCredentials = {username: string, password: string};

class MaxScraper extends BaseScraperWithBrowser<ScraperSpecificCredentials> {
getLoginOptions(credentials: ScraperSpecificCredentials) {
return {
loginUrl: LOGIN_URL,
fields: createLoginFields(credentials),
Expand Down
9 changes: 5 additions & 4 deletions src/scrapers/mizrahi.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@ import {
} from '../transactions';
import { BaseScraperWithBrowser, LoginResults, PossibleLoginResults } from './base-scraper-with-browser';
import { ScraperErrorTypes } from './errors';
import { ScraperCredentials } from './interface';

interface ScrapedTransaction {
RecTypeSpecified: boolean;
Expand Down Expand Up @@ -65,7 +64,7 @@ const checkingAccountTabHebrewName = 'עובר ושב';
const checkingAccountTabEnglishName = 'Checking Account';


function createLoginFields(credentials: ScraperCredentials) {
function createLoginFields(credentials: ScraperSpecificCredentials) {
return [
{ selector: usernameSelector, value: credentials.username },
{ selector: passwordSelector, value: credentials.password },
Expand Down Expand Up @@ -152,8 +151,10 @@ async function postLogin(page: Page) {
]);
}

class MizrahiScraper extends BaseScraperWithBrowser {
getLoginOptions(credentials: ScraperCredentials) {
// Mizrahi login credentials: username and password.
type ScraperSpecificCredentials = { username: string, password: string };

class MizrahiScraper extends BaseScraperWithBrowser<ScraperSpecificCredentials> {
getLoginOptions(credentials: ScraperSpecificCredentials) {
return {
loginUrl: LOGIN_URL,
fields: createLoginFields(credentials),
Expand Down
Loading

0 comments on commit 84a2048

Please sign in to comment.